diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index b9d3891c751..ec837e980e5 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -25,6 +25,8 @@ + + highlighting (type=" + highlighterType + ") and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(matchQuery("field1", "quick brown fox").operator(Operator.AND)) - .highlighter( - highlight() - .field("field1") - .order("score") - .preTags("") - .postTags("") - .highlighterType(highlighterType)); + SearchSourceBuilder source = searchSource().query(matchQuery("field1", "quick brown fox").operator(Operator.AND)) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("").highlighterType(highlighterType)); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - source = searchSource() - .query(matchQuery("field1", "fast brown fox").operator(Operator.AND)) + source = searchSource().query(matchQuery("field1", "fast brown fox").operator(Operator.AND)) .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); } } @@ -230,93 +264,139 @@ public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase { ensureGreen(); - client().prepareIndex("first_test_index", "type1", "0").setSource( - "field0", "The quick brown fox jumps over the lazy dog", - "field1", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("first_test_index", "type1", "1").setSource("field1", - "The quick browse button is a fancy thing, right bro?").get(); + client().prepareIndex("first_test_index", "type1", "0") + .setSource("field0", "The quick brown fox jumps over the lazy dog", "field1", "The quick brown fox jumps over the lazy dog") + .get(); + client().prepareIndex("first_test_index", "type1", "1") + .setSource("field1", "The quick browse button is a fancy thing, right bro?") + .get(); refresh(); logger.info("--> highlighting and searching on field0"); - SearchSourceBuilder source = searchSource() - .query(matchPhrasePrefixQuery("field0", "bro")) + SearchSourceBuilder source = searchSource().query(matchPhrasePrefixQuery("field0", "bro")) .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); SearchResponse searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - source = searchSource() - .query(matchPhrasePrefixQuery("field0", "quick bro")) + source = searchSource().query(matchPhrasePrefixQuery("field0", "quick bro")) .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field0", 0, 1, - equalTo("The quick brown fox 
jumps over the lazy dog")); + assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> highlighting and searching on field1"); - source = searchSource() - .query(boolQuery() - .should(matchPhrasePrefixQuery("field1", "test")) - .should(matchPhrasePrefixQuery("field1", "bro")) - ) - .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); + source = searchSource().query( + boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) + ).highlighter(highlight().field("field1").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); for (int i = 0; i < 2; i++) { - assertHighlight(searchResponse, i, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight( + searchResponse, + i, + "field1", + 0, + 1, + anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog") + ) + ); } - source = searchSource() - .query(matchPhrasePrefixQuery("field1", "quick bro")) + source = searchSource().query(matchPhrasePrefixQuery("field1", "quick bro")) .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight( + searchResponse, + 0, + "field1", + 0, + 1, + anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog") + ) + ); + assertHighlight( + searchResponse, + 1, + "field1", + 0, + 1, + anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog") + ) + ); - assertAcked(prepareCreate("second_test_index").setSettings(builder.build()).addMapping("doc", - "field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym", - "field3", "type=text,analyzer=synonym")); + assertAcked( + prepareCreate("second_test_index").setSettings(builder.build()) + .addMapping( + "doc", + "field4", + "type=text,term_vector=with_positions_offsets,analyzer=synonym", + "field3", + "type=text,analyzer=synonym" + ) + ); // with synonyms - client().prepareIndex("second_test_index", "doc", "0").setSource( - "type", "type2", - "field4", "The quick brown fox jumps over the lazy dog", - "field3", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("second_test_index", "doc", "1").setSource( - "type", "type2", - "field4", "The quick browse button is a fancy thing, right bro?").get(); - client().prepareIndex("second_test_index", "doc", "2").setSource( - "type", "type2", - "field4", "a quick fast blue car").get(); + client().prepareIndex("second_test_index", "doc", "0") + .setSource( + "type", + "type2", + "field4", + "The quick brown fox jumps over the lazy dog", + "field3", + "The quick brown fox jumps 
over the lazy dog" + ) + .get(); + client().prepareIndex("second_test_index", "doc", "1") + .setSource("type", "type2", "field4", "The quick browse button is a fancy thing, right bro?") + .get(); + client().prepareIndex("second_test_index", "doc", "2").setSource("type", "type2", "field4", "a quick fast blue car").get(); refresh(); - source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) + source = searchSource().postFilter(termQuery("type", "type2")) + .query(matchPhrasePrefixQuery("field3", "fast bro")) .highlighter(highlight().field("field3").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field3", 0, 1, - equalTo("The quick brown fox jumps over the lazy dog")); + assertHighlight(searchResponse, 0, "field3", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> highlighting and searching on field4"); - source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) + source = searchSource().postFilter(termQuery("type", "type2")) + .query(matchPhrasePrefixQuery("field4", "the fast bro")) .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight( + searchResponse, + 0, + "field4", + 0, + 1, + anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog") + ) + ); + assertHighlight( + searchResponse, + 1, + "field4", + 0, + 1, + anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog") + ) + ); logger.info("--> highlighting and searching on field4"); source = searchSource().postFilter(termQuery("type", "type2")) @@ -324,17 +404,31 @@ public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase { .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field4", 0, 1, - anyOf(equalTo("a quick fast blue car"), - equalTo("a quick fast blue car"))); + assertHighlight( + searchResponse, + 0, + "field4", + 0, + 1, + anyOf(equalTo("a quick fast blue car"), equalTo("a quick fast blue car")) + ); } public static XContentBuilder type1TermVectorMapping() throws IOException { - return XContentFactory.jsonBuilder().startObject().startObject("type1") + return XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") .startObject("properties") - .startObject("field1").field("type", "text").field("term_vector", "with_positions_offsets").endObject() - .startObject("field2").field("type", "text").field("term_vector", "with_positions_offsets").endObject() + .startObject("field1") + .field("type", "text") + .field("term_vector", "with_positions_offsets") .endObject() - .endObject().endObject(); + .startObject("field2") 
+ .field("type", "text") + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject() + .endObject(); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HtmlStripCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HtmlStripCharFilterFactoryTests.java index 1e5cd039b53..63a7ffdfc18 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HtmlStripCharFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HtmlStripCharFilterFactoryTests.java @@ -47,25 +47,29 @@ import java.io.IOException; import java.io.StringReader; import java.util.Map; - public class HtmlStripCharFilterFactoryTests extends OpenSearchTestCase { /** * Check that the deprecated name "htmlStrip" issues a deprecation warning for indices created since 6.3.0 */ public void testDeprecationWarning() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetadata.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_3_0, Version.CURRENT)) - .build(); + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_3_0, Version.CURRENT) + ) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { Map charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter; CharFilterFactory charFilterFactory = charFilters.get("htmlStrip"); assertNotNull(charFilterFactory.create(new StringReader("input"))); - assertWarnings("The [htmpStrip] char filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [html_strip] instead."); + assertWarnings( + "The [htmpStrip] char filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [html_strip] instead." 
+ ); } } @@ -73,10 +77,13 @@ public class HtmlStripCharFilterFactoryTests extends OpenSearchTestCase { * Check that the deprecated name "htmlStrip" does NOT issues a deprecation warning for indices created before 6.3.0 */ public void testNoDeprecationWarningPre6_3() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetadata.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_2_4)) - .build(); + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_2_4) + ) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java index e12bd3185f7..0b094e52df8 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java @@ -52,51 +52,49 @@ public class KeepFilterFactoryTests extends OpenSearchTokenStreamTestCase { public void testLoadWithoutSettings() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath( - createTempDir(), RESOURCE, new CommonAnalysisPlugin()); + createTempDir(), + RESOURCE, + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep"); Assert.assertNull(tokenFilter); } public void testLoadOverConfiguredSettings() { Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.broken_keep_filter.type", "keep") - .put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt") - .put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]") - .build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.broken_keep_filter.type", "keep") + .put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt") + .put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]") + .build(); try { AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); Assert.fail("path and array are configured"); - } catch (IllegalArgumentException e) { - } catch (IOException e) { + } catch (IllegalArgumentException e) {} catch (IOException e) { fail("expected IAE"); } } public void testKeepWordsPathSettings() { Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.non_broken_keep_filter.type", "keep") - .put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt") - .build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.non_broken_keep_filter.type", "keep") + .put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt") + .build(); try { // test our none 
existing setup is picked up AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); fail("expected an exception due to non existent keep_words_path"); - } catch (IllegalArgumentException e) { - } catch (IOException e) { + } catch (IllegalArgumentException e) {} catch (IOException e) { fail("expected IAE"); } - settings = Settings.builder().put(settings) - .putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test") - .build(); + settings = Settings.builder().put(settings).putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test").build(); try { // test our none existing setup is picked up AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); fail("expected an exception indicating that you can't use [keep_words_path] with [keep_words] "); - } catch (IllegalArgumentException e) { - } catch (IOException e) { + } catch (IllegalArgumentException e) {} catch (IOException e) { fail("expected IAE"); } @@ -104,25 +102,31 @@ public class KeepFilterFactoryTests extends OpenSearchTokenStreamTestCase { public void testCaseInsensitiveMapping() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath( - createTempDir(), RESOURCE, new CommonAnalysisPlugin()); + createTempDir(), + RESOURCE, + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keep_filter"); assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class)); String source = "hello small world"; - String[] expected = new String[]{"hello", "world"}; + String[] expected = new String[] { "hello", "world" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[]{1, 2}); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 1, 2 }); } public void testCaseSensitiveMapping() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath( - createTempDir(), RESOURCE, new CommonAnalysisPlugin()); + createTempDir(), + RESOURCE, + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_case_sensitive_keep_filter"); assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class)); String source = "Hello small world"; - String[] expected = new String[]{"Hello"}; + String[] expected = new String[] { "Hello" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[]{1}); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 1 }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java index 26c4516dc7d..1f1021b4bfe 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -51,13 +51,16 @@ public class KeepTypesFilterFactoryTests extends OpenSearchTokenStreamTestCase { private static final String BASE_SETTING = "index.analysis.filter.keep_numbers"; public void testKeepTypesInclude() throws IOException { - Settings.Builder settingsBuilder = 
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(BASE_SETTING + ".type", "keep_types") - .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }); + Settings.Builder settingsBuilder = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }); // either use default mode or set "include" mode explicitly if (random().nextBoolean()) { - settingsBuilder.put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, - KeepTypesFilterFactory.KeepTypesMode.INCLUDE); + settingsBuilder.put( + BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, + KeepTypesFilterFactory.KeepTypesMode.INCLUDE + ); } Settings settings = settingsBuilder.build(); OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); @@ -71,10 +74,12 @@ public class KeepTypesFilterFactoryTests extends OpenSearchTokenStreamTestCase { } public void testKeepTypesExclude() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(BASE_SETTING + ".type", "keep_types") - .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) - .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE).build(); + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) + .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE) + .build(); OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); @@ -86,12 +91,16 @@ public class KeepTypesFilterFactoryTests extends OpenSearchTokenStreamTestCase { } public void testKeepTypesException() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(BASE_SETTING + ".type", "keep_types") - .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) - .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, "bad_parameter").build(); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) + .put(BASE_SETTING + "." 
+ KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, "bad_parameter") + .build(); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()) + ); assertEquals("`keep_types` tokenfilter mode can only be [include] or [exclude] but was [bad_parameter].", ex.getMessage()); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java index a4507729fa2..40e354785dd 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java @@ -72,8 +72,7 @@ public class KeywordMarkerFilterFactoryTests extends OpenSearchTokenStreamTestCa assertThat(filter, instanceOf(SetKeywordMarkerFilter.class)); NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_keyword"); // jogging is not part of the keywords set, so verify that its the only stemmed word - assertAnalyzesTo(analyzer, "running jogging sleeping", - new String[] { "running", "jog", "sleeping" }); + assertAnalyzesTo(analyzer, "running jogging sleeping", new String[] { "running", "jog", "sleeping" }); } /** @@ -111,9 +110,10 @@ public class KeywordMarkerFilterFactoryTests extends OpenSearchTokenStreamTestCa .put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); - assertEquals("cannot specify both `keywords_pattern` and `keywords` or `keywords_path`", - e.getMessage()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()) + ); + assertEquals("cannot specify both `keywords_pattern` and `keywords` or `keywords_path`", e.getMessage()); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java index bece55849ad..76471fd98e5 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java @@ -47,9 +47,9 @@ import java.io.StringReader; public class LimitTokenCountFilterFactoryTests extends OpenSearchTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.builder() - .put("index.analysis.filter.limit_default.type", "limit") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.limit_default.type", "limit") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); { TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_default"); @@ -72,11 +72,11 @@ public class LimitTokenCountFilterFactoryTests extends OpenSearchTokenStreamTest public void testSettings() throws 
IOException { { Settings settings = Settings.builder() - .put("index.analysis.filter.limit_1.type", "limit") - .put("index.analysis.filter.limit_1.max_token_count", 3) - .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.limit_1.type", "limit") + .put("index.analysis.filter.limit_1.max_token_count", 3) + .put("index.analysis.filter.limit_1.consume_all_tokens", true) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1"); String source = "the quick brown fox"; @@ -87,11 +87,11 @@ public class LimitTokenCountFilterFactoryTests extends OpenSearchTokenStreamTest } { Settings settings = Settings.builder() - .put("index.analysis.filter.limit_1.type", "limit") - .put("index.analysis.filter.limit_1.max_token_count", 3) - .put("index.analysis.filter.limit_1.consume_all_tokens", false) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.limit_1.type", "limit") + .put("index.analysis.filter.limit_1.max_token_count", 3) + .put("index.analysis.filter.limit_1.consume_all_tokens", false) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1"); String source = "the quick brown fox"; @@ -103,11 +103,11 @@ public class LimitTokenCountFilterFactoryTests extends OpenSearchTokenStreamTest { Settings settings = Settings.builder() - .put("index.analysis.filter.limit_1.type", "limit") - .put("index.analysis.filter.limit_1.max_token_count", 17) - .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.limit_1.type", "limit") + .put("index.analysis.filter.limit_1.max_token_count", 17) + .put("index.analysis.filter.limit_1.consume_all_tokens", true) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1"); String source = "the quick brown fox"; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java index a89693ce609..390e36c4ca0 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java @@ -51,13 +51,18 @@ public class MassiveWordListTests extends OpenSearchSingleNodeTestCase { for (int i = 0; i < wordList.length; i++) { wordList[i] = "hello world"; } - client().admin().indices().prepareCreate("test").setSettings(Settings.builder() - .put("index.number_of_shards", 1) - .put("analysis.analyzer.test_analyzer.type", "custom") - .put("analysis.analyzer.test_analyzer.tokenizer", "standard") - .putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase") - .put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder") - 
.putList("analysis.filter.dictionary_decompounder.word_list", wordList) - ).get(); + client().admin() + .indices() + .prepareCreate("test") + .setSettings( + Settings.builder() + .put("index.number_of_shards", 1) + .put("analysis.analyzer.test_analyzer.type", "custom") + .put("analysis.analyzer.test_analyzer.tokenizer", "standard") + .putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase") + .put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder") + .putList("analysis.filter.dictionary_decompounder.word_list", wordList) + ) + .get(); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java index a16d74dbfa9..514c53f1745 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java @@ -49,9 +49,7 @@ public class MinHashFilterFactoryTests extends OpenSearchTokenStreamTestCase { int default_hash_count = 1; int default_bucket_size = 512; int default_hash_set_size = 1; - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("min_hash"); String source = "the quick brown fox"; @@ -60,8 +58,7 @@ public class MinHashFilterFactoryTests extends OpenSearchTokenStreamTestCase { // with_rotation is true by default, and hash_set_size is 1, so even though the source doesn't // have enough tokens to fill all the buckets, we still expect 512 tokens. 
- assertStreamHasNumberOfTokens(tokenFilter.create(tokenizer), - default_hash_count * default_bucket_size * default_hash_set_size); + assertStreamHasNumberOfTokens(tokenFilter.create(tokenizer), default_hash_count * default_bucket_size * default_hash_set_size); } public void testSettings() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java index 922c10e8e85..167f61464da 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java @@ -50,9 +50,7 @@ import java.util.Collections; public class MultiplexerTokenFilterTests extends OpenSearchTokenStreamTestCase { public void testMultiplexingFilter() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.t.type", "truncate") @@ -65,30 +63,27 @@ public class MultiplexerTokenFilterTests extends OpenSearchTokenStreamTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + IndexAnalyzers indexAnalyzers = new AnalysisModule( + TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin()) + ).getAnalysisRegistry().build(idxSettings); try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ - "ONe", "on", "ONE", "tHree", "th", "THREE" - }, new int[]{ - 1, 0, 0, 1, 0, 0 - }); + assertAnalyzesTo( + analyzer, + "ONe tHree", + new String[] { "ONe", "on", "ONE", "tHree", "th", "THREE" }, + new int[] { 1, 0, 0, 1, 0, 0 } + ); // Duplicates are removed - assertAnalyzesTo(analyzer, "ONe THREE", new String[]{ - "ONe", "on", "ONE", "THREE", "th" - }, new int[]{ - 1, 0, 0, 1, 0, 0 - }); + assertAnalyzesTo(analyzer, "ONe THREE", new String[] { "ONe", "on", "ONE", "THREE", "th" }, new int[] { 1, 0, 0, 1, 0, 0 }); } } public void testMultiplexingNoOriginal() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.t.type", "truncate") @@ -102,16 +97,14 @@ public class MultiplexerTokenFilterTests extends OpenSearchTokenStreamTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + IndexAnalyzers indexAnalyzers = new 
AnalysisModule( + TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin()) + ).getAnalysisRegistry().build(idxSettings); try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ - "on", "ONE", "th", "THREE" - }, new int[]{ - 1, 0, 1, 0, - }); + assertAnalyzesTo(analyzer, "ONe tHree", new String[] { "on", "ONE", "th", "THREE" }, new int[] { 1, 0, 1, 0, }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java index 45ac5f58138..85090648096 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java @@ -52,10 +52,11 @@ public class NGramTokenFilterFactoryTests extends OpenSearchTokenStreamTestCase .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ngram.type", "ngram") .build(), - new CommonAnalysisPlugin()); + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ngram"); String source = "foo"; - String[] expected = new String[]{"f", "fo", "o", "oo", "o"}; + String[] expected = new String[] { "f", "fo", "o", "oo", "o" }; Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); @@ -68,10 +69,11 @@ public class NGramTokenFilterFactoryTests extends OpenSearchTokenStreamTestCase .put("index.analysis.filter.my_ngram.type", "ngram") .put("index.analysis.filter.my_ngram.preserve_original", true) .build(), - new CommonAnalysisPlugin()); + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ngram"); String source = "foo"; - String[] expected = new String[]{"f", "fo", "o", "oo", "o", "foo"}; + String[] expected = new String[] { "f", "fo", "o", "oo", "o", "foo" }; Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java index 656c8afb27f..c23b2c6082e 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java @@ -61,30 +61,43 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { final Settings indexSettings = newAnalysisSettingsBuilder().build(); final IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(index, indexSettings); for (String tokenChars : Arrays.asList("letter", " digit ", "punctuation", "DIGIT", "CoNtRoL", "dash_punctuation")) { - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .put("token_chars", tokenChars).build(); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", tokenChars) + .build(); new NGramTokenizerFactory(indexProperties, null, name, 
settings).create(); // no exception } { - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .put("token_chars", "DIRECTIONALITY_UNDEFINED").build(); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new NGramTokenizerFactory(indexProperties, null, name, settings).create()); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", "DIRECTIONALITY_UNDEFINED") + .build(); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new NGramTokenizerFactory(indexProperties, null, name, settings).create() + ); assertEquals("Unknown token type: 'directionality_undefined'", ex.getMessage().substring(0, 46)); assertTrue(ex.getMessage().contains("custom")); } { - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "custom") - .put("custom_token_chars", "_-").build(); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", "custom") + .put("custom_token_chars", "_-") + .build(); new NGramTokenizerFactory(indexProperties, null, name, settings).create(); // no exception } { - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "custom") - .build(); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new NGramTokenizerFactory(indexProperties, null, name, settings).create()); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", "custom") + .build(); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new NGramTokenizerFactory(indexProperties, null, name, settings).create() + ); assertEquals("Token type: 'custom' requires setting `custom_token_chars`", ex.getMessage()); } } @@ -94,12 +107,14 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().put(IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey(), 2).build(); - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4) - .putList("token_chars", new String[0]).build(); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 4) + .putList("token_chars", new String[0]) + .build(); Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) .create(); tokenizer.setReader(new StringReader("1.34")); - assertTokenStreamContents(tokenizer, new String[] {"1.", "1.3", "1.34", ".3", ".34", "34"}); + assertTokenStreamContents(tokenizer, new String[] { "1.", "1.3", "1.34", ".3", ".34", "34" }); } public void testCustomTokenChars() throws IOException { @@ -107,12 +122,15 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().put(IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey(), 2).build(); - final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .putList("token_chars", "letter", "custom").put("custom_token_chars","_-").build(); + final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .putList("token_chars", "letter", "custom") + 
.put("custom_token_chars", "_-") + .build(); Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) .create(); tokenizer.setReader(new StringReader("Abc -gh _jk =lm")); - assertTokenStreamContents(tokenizer, new String[] {"Ab", "Abc", "bc", "-g", "-gh", "gh", "_j", "_jk", "jk", "lm"}); + assertTokenStreamContents(tokenizer, new String[] { "Ab", "Abc", "bc", "-g", "-gh", "gh", "_j", "_jk", "jk", "lm" }); } public void testPreTokenization() throws IOException { @@ -120,19 +138,21 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); - Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .put("token_chars", "letter,digit").build(); + Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build(); Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) .create(); tokenizer.setReader(new StringReader("Åbc déf g\uD801\uDC00f ")); - assertTokenStreamContents(tokenizer, - new String[] {"Åb", "Åbc", "bc", "dé", "déf", "éf", "g\uD801\uDC00", "g\uD801\uDC00f", "\uD801\uDC00f"}); - settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .put("token_chars", "letter,digit,punctuation,whitespace,symbol").build(); + assertTokenStreamContents( + tokenizer, + new String[] { "Åb", "Åbc", "bc", "dé", "déf", "éf", "g\uD801\uDC00", "g\uD801\uDC00f", "\uD801\uDC00f" } + ); + settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", "letter,digit,punctuation,whitespace,symbol") + .build(); tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(); tokenizer.setReader(new StringReader(" a!$ 9")); - assertTokenStreamContents(tokenizer, - new String[] {" a", " a!", "a!", "a!$", "!$", "!$ ", "$ ", "$ 9", " 9"}); + assertTokenStreamContents(tokenizer, new String[] { " a", " a!", "a!", "a!$", "!$", "!$ ", "$ ", "$ 9", " 9" }); } public void testPreTokenizationEdge() throws IOException { @@ -141,18 +161,22 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build(); - Tokenizer tokenizer = - new EdgeNGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings).create(); + Tokenizer tokenizer = new EdgeNGramTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + name, + settings + ).create(); tokenizer.setReader(new StringReader("Åbc déf g\uD801\uDC00f ")); - assertTokenStreamContents(tokenizer, - new String[] {"Åb", "Åbc", "dé", "déf", "g\uD801\uDC00", "g\uD801\uDC00f"}); - settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3) - .put("token_chars", "letter,digit,punctuation,whitespace,symbol").build(); + assertTokenStreamContents(tokenizer, new String[] { "Åb", "Åbc", "dé", "déf", "g\uD801\uDC00", "g\uD801\uDC00f" }); + settings = newAnalysisSettingsBuilder().put("min_gram", 2) + .put("max_gram", 3) + .put("token_chars", 
"letter,digit,punctuation,whitespace,symbol") + .build(); tokenizer = new EdgeNGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) .create(); tokenizer.setReader(new StringReader(" a!$ 9")); - assertTokenStreamContents(tokenizer, - new String[] {" a", " a!"}); + assertTokenStreamContents(tokenizer, new String[] { " a", " a!" }); } public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception { @@ -170,9 +194,12 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetadata.SETTING_VERSION_CREATED, v.id).build(); Tokenizer tokenizer = new MockTokenizer(); tokenizer.setReader(new StringReader("foo bar")); - TokenStream edgeNGramTokenFilter = - new EdgeNGramTokenFilterFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings) - .create(tokenizer); + TokenStream edgeNGramTokenFilter = new EdgeNGramTokenFilterFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + name, + settings + ).create(tokenizer); if (reverse) { assertThat(edgeNGramTokenFilter, instanceOf(ReverseStringFilter.class)); } else { @@ -185,7 +212,7 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { * test that throws an error when trying to get a NGramTokenizer where difference between max_gram and min_gram * is greater than the allowed value of max_ngram_diff */ - public void testMaxNGramDiffException() throws Exception{ + public void testMaxNGramDiffException() throws Exception { final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); @@ -197,12 +224,19 @@ public class NGramTokenizerFactoryTests extends OpenSearchTokenStreamTestCase { int max_gram = min_gram + ngramDiff; final Settings settings = newAnalysisSettingsBuilder().put("min_gram", min_gram).put("max_gram", max_gram).build(); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> - new NGramTokenizerFactory(indexProperties, null, name, settings).create()); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new NGramTokenizerFactory(indexProperties, null, name, settings).create() + ); assertEquals( "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" - + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the [" - + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting.", - ex.getMessage()); + + maxAllowedNgramDiff + + "] but was [" + + ngramDiff + + "]. 
This limit can be set by changing the [" + + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + + "] index level setting.", + ex.getMessage() + ); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchSolrSynonymParserTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchSolrSynonymParserTests.java index 4e3bf297750..4a5ee5d3856 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchSolrSynonymParserTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchSolrSynonymParserTests.java @@ -52,23 +52,20 @@ public class OpenSearchSolrSynonymParserTests extends OpenSearchTokenStreamTestC public void testLenientParser() throws IOException, ParseException { OpenSearchSolrSynonymParser parser = new OpenSearchSolrSynonymParser(true, false, true, new StandardAnalyzer()); - String rules = - "&,and\n" + - "come,advance,approach\n"; + String rules = "&,and\n" + "come,advance,approach\n"; StringReader rulesReader = new StringReader(rules); parser.parse(rulesReader); SynonymMap synonymMap = parser.build(); Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); - assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + assertTokenStreamContents(ts, new String[] { "come", "quietly", "then", "come", "destroy" }); } public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { CharArraySet stopSet = new CharArraySet(1, true); stopSet.add("bar"); - OpenSearchSolrSynonymParser parser = - new OpenSearchSolrSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + OpenSearchSolrSynonymParser parser = new OpenSearchSolrSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); String rules = "foo,bar,baz"; StringReader rulesReader = new StringReader(rules); parser.parse(rulesReader); @@ -76,14 +73,12 @@ public class OpenSearchSolrSynonymParserTests extends OpenSearchTokenStreamTestC Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); TokenStream ts = new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); - assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + assertTokenStreamContents(ts, new String[] { "first", "word", "is", "foo", "then", "and", "lastly", "foo" }); } public void testNonLenientParser() { OpenSearchSolrSynonymParser parser = new OpenSearchSolrSynonymParser(true, false, false, new StandardAnalyzer()); - String rules = - "&,and=>and\n" + - "come,advance,approach\n"; + String rules = "&,and=>and\n" + "come,advance,approach\n"; StringReader rulesReader = new StringReader(rules); ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchWordnetSynonymParserTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchWordnetSynonymParserTests.java index ac5f8c24056..d74641bd16d 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchWordnetSynonymParserTests.java +++ 
b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/OpenSearchWordnetSynonymParserTests.java @@ -52,47 +52,41 @@ public class OpenSearchWordnetSynonymParserTests extends OpenSearchTokenStreamTe public void testLenientParser() throws IOException, ParseException { OpenSearchWordnetSynonymParser parser = new OpenSearchWordnetSynonymParser(true, false, true, new StandardAnalyzer()); - String rules = - "s(100000001,1,'&',a,1,0).\n" + - "s(100000001,2,'and',a,1,0).\n" + - "s(100000002,1,'come',v,1,0).\n" + - "s(100000002,2,'advance',v,1,0).\n" + - "s(100000002,3,'approach',v,1,0)."; + String rules = "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; StringReader rulesReader = new StringReader(rules); parser.parse(rulesReader); SynonymMap synonymMap = parser.build(); Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); - assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + assertTokenStreamContents(ts, new String[] { "come", "quietly", "then", "come", "destroy" }); } public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { CharArraySet stopSet = new CharArraySet(1, true); stopSet.add("bar"); - OpenSearchWordnetSynonymParser parser = - new OpenSearchWordnetSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); - String rules = - "s(100000001,1,'foo',v,1,0).\n" + - "s(100000001,2,'bar',v,1,0).\n" + - "s(100000001,3,'baz',v,1,0)."; + OpenSearchWordnetSynonymParser parser = new OpenSearchWordnetSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + String rules = "s(100000001,1,'foo',v,1,0).\n" + "s(100000001,2,'bar',v,1,0).\n" + "s(100000001,3,'baz',v,1,0)."; StringReader rulesReader = new StringReader(rules); parser.parse(rulesReader); SynonymMap synonymMap = parser.build(); Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); TokenStream ts = new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); - assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + assertTokenStreamContents(ts, new String[] { "first", "word", "is", "foo", "then", "and", "lastly", "foo" }); } public void testNonLenientParser() { OpenSearchWordnetSynonymParser parser = new OpenSearchWordnetSynonymParser(true, false, false, new StandardAnalyzer()); - String rules = - "s(100000001,1,'&',a,1,0).\n" + - "s(100000001,2,'and',a,1,0).\n" + - "s(100000002,1,'come',v,1,0).\n" + - "s(100000002,2,'advance',v,1,0).\n" + - "s(100000002,3,'approach',v,1,0)."; + String rules = "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; StringReader rulesReader = new StringReader(rules); ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java index 
958f04ed339..73c104a5b72 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PathHierarchyTokenizerFactoryTests.java @@ -48,73 +48,105 @@ public class PathHierarchyTokenizerFactoryTests extends OpenSearchTokenStreamTes public void testDefaults() throws IOException { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); - Tokenizer tokenizer = new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", Settings.EMPTY).create(); + Tokenizer tokenizer = new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + Settings.EMPTY + ).create(); tokenizer.setReader(new StringReader("/one/two/three")); - assertTokenStreamContents(tokenizer, new String[] {"/one", "/one/two", "/one/two/three"}); + assertTokenStreamContents(tokenizer, new String[] { "/one", "/one/two", "/one/two/three" }); } public void testReverse() throws IOException { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("reverse", true).build(); - Tokenizer tokenizer = new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create(); + Tokenizer tokenizer = new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create(); tokenizer.setReader(new StringReader("/one/two/three")); - assertTokenStreamContents(tokenizer, new String[] {"/one/two/three", "one/two/three", "two/three", "three"}); + assertTokenStreamContents(tokenizer, new String[] { "/one/two/three", "one/two/three", "two/three", "three" }); } public void testDelimiter() throws IOException { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("delimiter", "-").build(); - Tokenizer tokenizer = new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create(); + Tokenizer tokenizer = new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create(); tokenizer.setReader(new StringReader("/one/two/three")); - assertTokenStreamContents(tokenizer, new String[] {"/one/two/three"}); + assertTokenStreamContents(tokenizer, new String[] { "/one/two/three" }); tokenizer.setReader(new StringReader("one-two-three")); - assertTokenStreamContents(tokenizer, new String[] {"one", "one-two", "one-two-three"}); + assertTokenStreamContents(tokenizer, new String[] { "one", "one-two", "one-two-three" }); } public void testReplace() throws IOException { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("replacement", "-").build(); - Tokenizer tokenizer = new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create(); + Tokenizer tokenizer = new 
PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create(); tokenizer.setReader(new StringReader("/one/two/three")); - assertTokenStreamContents(tokenizer, new String[] {"-one", "-one-two", "-one-two-three"}); + assertTokenStreamContents(tokenizer, new String[] { "-one", "-one-two", "-one-two-three" }); tokenizer.setReader(new StringReader("one-two-three")); - assertTokenStreamContents(tokenizer, new String[] {"one-two-three"}); + assertTokenStreamContents(tokenizer, new String[] { "one-two-three" }); } public void testSkip() throws IOException { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("skip", 2).build(); - Tokenizer tokenizer = new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create(); + Tokenizer tokenizer = new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create(); tokenizer.setReader(new StringReader("/one/two/three/four/five")); - assertTokenStreamContents(tokenizer, new String[] {"/three", "/three/four", "/three/four/five"}); + assertTokenStreamContents(tokenizer, new String[] { "/three", "/three/four", "/three/four/five" }); } public void testDelimiterExceptions() { final Index index = new Index("test", "_na_"); final Settings indexSettings = newAnalysisSettingsBuilder().build(); { - String delimiter = RandomPicks.randomFrom(random(), new String[] {"--", ""}); + String delimiter = RandomPicks.randomFrom(random(), new String[] { "--", "" }); Settings settings = newAnalysisSettingsBuilder().put("delimiter", delimiter).build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create() + ); assertEquals("delimiter must be a one char value", e.getMessage()); } { - String replacement = RandomPicks.randomFrom(random(), new String[] {"--", ""}); + String replacement = RandomPicks.randomFrom(random(), new String[] { "--", "" }); Settings settings = newAnalysisSettingsBuilder().put("replacement", replacement).build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new PathHierarchyTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, - "path-hierarchy-tokenizer", settings).create()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new PathHierarchyTokenizerFactory( + IndexSettingsModule.newIndexSettings(index, indexSettings), + null, + "path-hierarchy-tokenizer", + settings + ).create() + ); assertEquals("replacement must be a one char value", e.getMessage()); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternAnalyzerTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternAnalyzerTests.java index eef8c8b86a3..162c7e19efb 100644 --- 
a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternAnalyzerTests.java @@ -46,90 +46,96 @@ import java.util.regex.Pattern; */ public class PatternAnalyzerTests extends OpenSearchTokenStreamTestCase { - /** - * Test PatternAnalyzer when it is configured with a non-word pattern. - */ - public void testNonWordPattern() throws IOException { - // Split on non-letter pattern, do not lowercase, no stopwords - PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\W+"), false, null); - assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", - new String[] { "The", "quick", "brown", "Fox", "the", "abcd1234", "56", "78", "dc" }); + /** + * Test PatternAnalyzer when it is configured with a non-word pattern. + */ + public void testNonWordPattern() throws IOException { + // Split on non-letter pattern, do not lowercase, no stopwords + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\W+"), false, null); + assertAnalyzesTo( + a, + "The quick brown Fox,the abcd1234 (56.78) dc.", + new String[] { "The", "quick", "brown", "Fox", "the", "abcd1234", "56", "78", "dc" } + ); - // split on non-letter pattern, lowercase, english stopwords - PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, - EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", - new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" }); - } + // split on non-letter pattern, lowercase, english stopwords + PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); + assertAnalyzesTo( + b, + "The quick brown Fox,the abcd1234 (56.78) dc.", + new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" } + ); + } - /** - * Test PatternAnalyzer when it is configured with a whitespace pattern. - * Behavior can be similar to WhitespaceAnalyzer (depending upon options) - */ - public void testWhitespacePattern() throws IOException { - // Split on whitespace patterns, do not lowercase, no stopwords - PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); - assertAnalyzesTo(a, "The quick brown Fox,the abcd1234 (56.78) dc.", - new String[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." }); + /** + * Test PatternAnalyzer when it is configured with a whitespace pattern. + * Behavior can be similar to WhitespaceAnalyzer (depending upon options) + */ + public void testWhitespacePattern() throws IOException { + // Split on whitespace patterns, do not lowercase, no stopwords + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); + assertAnalyzesTo( + a, + "The quick brown Fox,the abcd1234 (56.78) dc.", + new String[] { "The", "quick", "brown", "Fox,the", "abcd1234", "(56.78)", "dc." } + ); - // Split on whitespace patterns, lowercase, english stopwords - PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, - EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", - new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." 
}); - } + // Split on whitespace patterns, lowercase, english stopwords + PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); + assertAnalyzesTo( + b, + "The quick brown Fox,the abcd1234 (56.78) dc.", + new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." } + ); + } - /** - * Test PatternAnalyzer when it is configured with a custom pattern. In this - * case, text is tokenized on the comma "," - */ - public void testCustomPattern() throws IOException { - // Split on comma, do not lowercase, no stopwords - PatternAnalyzer a = new PatternAnalyzer(Pattern.compile(","), false, null); - assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,", - new String[] { "Here", "Are", "some", "Comma", "separated", "words" }); + /** + * Test PatternAnalyzer when it is configured with a custom pattern. In this + * case, text is tokenized on the comma "," + */ + public void testCustomPattern() throws IOException { + // Split on comma, do not lowercase, no stopwords + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile(","), false, null); + assertAnalyzesTo(a, "Here,Are,some,Comma,separated,words,", new String[] { "Here", "Are", "some", "Comma", "separated", "words" }); - // split on comma, lowercase, english stopwords - PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, - EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", - new String[] { "here", "some", "comma", "separated", "words" }); - } + // split on comma, lowercase, english stopwords + PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); + assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", new String[] { "here", "some", "comma", "separated", "words" }); + } - /** - * Test PatternAnalyzer against a large document. - */ - public void testHugeDocument() throws IOException { - StringBuilder document = new StringBuilder(); - // 5000 a's - char largeWord[] = new char[5000]; - Arrays.fill(largeWord, 'a'); - document.append(largeWord); + /** + * Test PatternAnalyzer against a large document. 
+ */ + public void testHugeDocument() throws IOException { + StringBuilder document = new StringBuilder(); + // 5000 a's + char largeWord[] = new char[5000]; + Arrays.fill(largeWord, 'a'); + document.append(largeWord); - // a space - document.append(' '); + // a space + document.append(' '); - // 2000 b's - char largeWord2[] = new char[2000]; - Arrays.fill(largeWord2, 'b'); - document.append(largeWord2); + // 2000 b's + char largeWord2[] = new char[2000]; + Arrays.fill(largeWord2, 'b'); + document.append(largeWord2); - // Split on whitespace patterns, do not lowercase, no stopwords - PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); - assertAnalyzesTo(a, document.toString(), - new String[] { new String(largeWord), new String(largeWord2) }); - } + // Split on whitespace patterns, do not lowercase, no stopwords + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); + assertAnalyzesTo(a, document.toString(), new String[] { new String(largeWord), new String(largeWord2) }); + } - /** blast some random strings through the analyzer */ - public void testRandomStrings() throws Exception { - Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER); - } + /** blast some random strings through the analyzer */ + public void testRandomStrings() throws Exception { + Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); + checkRandomData(random(), a, 10000 * RANDOM_MULTIPLIER); + } - public void testNormalize() { - PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); - assertEquals(new BytesRef("FooBar"), a.normalize("dummy", "FooBar")); - a = new PatternAnalyzer(Pattern.compile("\\s+"), true, null); - assertEquals(new BytesRef("foobar"), a.normalize("dummy", "FooBar")); - } + public void testNormalize() { + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); + assertEquals(new BytesRef("FooBar"), a.normalize("dummy", "FooBar")); + a = new PatternAnalyzer(Pattern.compile("\\s+"), true, null); + assertEquals(new BytesRef("foobar"), a.normalize("dummy", "FooBar")); + } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java index 2e03dac4329..5cd18a5b01f 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java @@ -49,31 +49,35 @@ public class PatternCaptureTokenFilterTests extends OpenSearchTokenStreamTestCas public void testPatternCaptureTokenFilter() throws Exception { String json = "/org/opensearch/analysis/common/pattern_capture.json"; Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .loadFromStream(json, getClass().getResourceAsStream(json), false) - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .loadFromStream(json, getClass().getResourceAsStream(json), false) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexAnalyzers indexAnalyzers = 
createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; NamedAnalyzer analyzer1 = indexAnalyzers.get("single"); - assertTokenStreamContents(analyzer1.tokenStream("test", "foobarbaz"), new String[]{"foobarbaz","foobar","foo"}); + assertTokenStreamContents(analyzer1.tokenStream("test", "foobarbaz"), new String[] { "foobarbaz", "foobar", "foo" }); NamedAnalyzer analyzer2 = indexAnalyzers.get("multi"); - assertTokenStreamContents(analyzer2.tokenStream("test", "abc123def"), new String[]{"abc123def","abc","123","def"}); + assertTokenStreamContents(analyzer2.tokenStream("test", "abc123def"), new String[] { "abc123def", "abc", "123", "def" }); NamedAnalyzer analyzer3 = indexAnalyzers.get("preserve"); - assertTokenStreamContents(analyzer3.tokenStream("test", "foobarbaz"), new String[]{"foobar","foo"}); + assertTokenStreamContents(analyzer3.tokenStream("test", "foobarbaz"), new String[] { "foobar", "foo" }); } public void testNoPatterns() { try { - new PatternCaptureGroupTokenFilterFactory(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), null, - "pattern_capture", Settings.builder().put("pattern", "foobar").build()); - fail ("Expected IllegalArgumentException"); + new PatternCaptureGroupTokenFilterFactory( + IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), + null, + "pattern_capture", + Settings.builder().put("pattern", "foobar").build() + ); + fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("required setting 'patterns' is missing")); } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java index 5e7ae3e29c8..c16f4f37846 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java @@ -53,9 +53,7 @@ import java.util.Collections; public class PredicateTokenScriptFilterTests extends OpenSearchTokenStreamTestCase { public void testSimpleFilter() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.f.type", "predicate_token_filter") @@ -74,7 +72,7 @@ public class PredicateTokenScriptFilterTests extends OpenSearchTokenStreamTestCa }; @SuppressWarnings("unchecked") - ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){ + ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()) { @Override public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) { assertEquals(context, AnalysisPredicateScript.CONTEXT); @@ -85,16 +83,13 @@ public class PredicateTokenScriptFilterTests extends OpenSearchTokenStreamTestCa CommonAnalysisPlugin plugin = new CommonAnalysisPlugin(); plugin.createComponents(null, null, null, null, scriptService, null, null, null, null, null, null); - AnalysisModule module - = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin));
+ AnalysisModule module = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "Oh what a wonderful thing to be", new String[]{ - "Oh", "what", "to", "be" - }); + assertAnalyzesTo(analyzer, "Oh what a wonderful thing to be", new String[] { "Oh", "what", "to", "be" }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java index 7aed6f021b8..cb22835c364 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java @@ -64,11 +64,7 @@ public class RemoveDuplicatesFilterFactoryTests extends OpenSearchTokenStreamTes new Token("d", 1, 4, 5) ); - assertTokenStreamContents(tokenFilter.create(cts), new String[]{ - "a", "b", "c", "d" - }, new int[]{ - 1, 1, 0, 1 - }); + assertTokenStreamContents(tokenFilter.create(cts), new String[] { "a", "b", "c", "d" }, new int[] { 1, 1, 0, 1 }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ScriptedConditionTokenFilterTests.java index f2a5af92e91..9212fcad285 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -52,9 +52,7 @@ import java.util.Collections; public class ScriptedConditionTokenFilterTests extends OpenSearchTokenStreamTestCase { public void testSimpleCondition() throws Exception { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.cond.type", "condition") @@ -74,7 +72,7 @@ public class ScriptedConditionTokenFilterTest }; @SuppressWarnings("unchecked") - ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){ + ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()) { @Override public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) { assertEquals(context, AnalysisPredicateScript.CONTEXT); @@ -85,16 +83,13 @@ public class ScriptedConditionTokenFilterTest CommonAnalysisPlugin plugin = new CommonAnalysisPlugin(); plugin.createComponents(null, null, null, null, scriptService, null, null, null, null, null, null); - AnalysisModule module - = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); + AnalysisModule module = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); try 
(NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "Vorsprung Durch Technik", new String[]{ - "Vorsprung", "Durch", "TECHNIK" - }); + assertAnalyzesTo(analyzer, "Vorsprung Durch Technik", new String[] { "Vorsprung", "Durch", "TECHNIK" }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ShingleTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ShingleTokenFilterTests.java index f62d7fc55d8..0646fed4730 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ShingleTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ShingleTokenFilterTests.java @@ -52,7 +52,8 @@ public class ShingleTokenFilterTests extends OpenSearchTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .build(), - new CommonAnalysisPlugin()); + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("shingle"); Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader("this is a test")); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SnowballAnalyzerTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SnowballAnalyzerTests.java index 6660f0837be..ebf72425ebb 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SnowballAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SnowballAnalyzerTests.java @@ -38,35 +38,29 @@ import org.opensearch.test.OpenSearchTokenStreamTestCase; public class SnowballAnalyzerTests extends OpenSearchTokenStreamTestCase { - public void testEnglish() throws Exception { - Analyzer a = new SnowballAnalyzer("English"); - assertAnalyzesTo(a, "he abhorred accents", - new String[]{"he", "abhor", "accent"}); - } + public void testEnglish() throws Exception { + Analyzer a = new SnowballAnalyzer("English"); + assertAnalyzesTo(a, "he abhorred accents", new String[] { "he", "abhor", "accent" }); + } - public void testStopwords() throws Exception { - Analyzer a = new SnowballAnalyzer("English", - EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - assertAnalyzesTo(a, "the quick brown fox jumped", - new String[]{"quick", "brown", "fox", "jump"}); - } + public void testStopwords() throws Exception { + Analyzer a = new SnowballAnalyzer("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); + assertAnalyzesTo(a, "the quick brown fox jumped", new String[] { "quick", "brown", "fox", "jump" }); + } - /** - * Test turkish lowercasing - */ - public void testTurkish() throws Exception { - Analyzer a = new SnowballAnalyzer("Turkish"); + /** + * Test turkish lowercasing + */ + public void testTurkish() throws Exception { + Analyzer a = new SnowballAnalyzer("Turkish"); - assertAnalyzesTo(a, "ağacı", new String[] { "ağaç" }); - assertAnalyzesTo(a, "AĞACI", new String[] { "ağaç" }); - } + assertAnalyzesTo(a, "ağacı", new String[] { "ağaç" }); + assertAnalyzesTo(a, "AĞACI", new String[] { "ağaç" }); + } - - public void testReusableTokenStream() throws Exception { - Analyzer a = new SnowballAnalyzer("English"); - assertAnalyzesTo(a, "he abhorred accents", - new String[]{"he", "abhor", "accent"}); - assertAnalyzesTo(a, "she abhorred him", - new String[]{"she", "abhor", "him"}); - } + public void testReusableTokenStream() throws 
Exception { + Analyzer a = new SnowballAnalyzer("English"); + assertAnalyzesTo(a, "he abhorred accents", new String[] { "he", "abhor", "accent" }); + assertAnalyzesTo(a, "she abhorred him", new String[] { "she", "abhor", "him" }); + } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java index 92d8fd5807d..96e05efa977 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java @@ -59,7 +59,8 @@ public class StemmerOverrideTokenFilterFactoryTests extends OpenSearchTokenStrea .putList("index.analysis.filter.my_stemmer_override.rules", rules) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(), - new CommonAnalysisPlugin()); + new CommonAnalysisPlugin() + ); return analysis.tokenFilter.get("my_stemmer_override"); } @@ -75,19 +76,18 @@ public class StemmerOverrideTokenFilterFactoryTests extends OpenSearchTokenStrea "=>a", // no keys "a,=>b" // empty key )) { - expectThrows(RuntimeException.class, String.format( - Locale.ROOT, "Should fail for invalid rule: '%s'", rule - ), () -> create(rule)); + expectThrows( + RuntimeException.class, + String.format(Locale.ROOT, "Should fail for invalid rule: '%s'", rule), + () -> create(rule) + ); } } public void testRulesOk() throws IOException { - TokenFilterFactory tokenFilterFactory = create( - "a => 1", - "b,c => 2" - ); + TokenFilterFactory tokenFilterFactory = create("a => 1", "b,c => 2"); Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader("a b c")); - assertTokenStreamContents(tokenFilterFactory.create(tokenizer), new String[]{"1", "2", "2"}); + assertTokenStreamContents(tokenFilterFactory.create(tokenizer), new String[] { "1", "2", "2" }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java index a65ec662792..fca64f4915c 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -63,13 +63,13 @@ public class StemmerTokenFilterFactoryTests extends OpenSearchTokenStreamTestCas for (int i = 0; i < iters; i++) { Version v = VersionUtils.randomVersion(random()); Settings settings = Settings.builder() - .put("index.analysis.filter.my_english.type", "stemmer") - .put("index.analysis.filter.my_english.language", "english") - .put("index.analysis.analyzer.my_english.tokenizer","whitespace") - .put("index.analysis.analyzer.my_english.filter","my_english") - .put(SETTING_VERSION_CREATED,v) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.my_english.type", "stemmer") + .put("index.analysis.filter.my_english.language", "english") + .put("index.analysis.analyzer.my_english.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_english.filter", "my_english") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis 
= AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_english"); @@ -80,7 +80,7 @@ public class StemmerTokenFilterFactoryTests extends OpenSearchTokenStreamTestCas IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; NamedAnalyzer analyzer = indexAnalyzers.get("my_english"); assertThat(create, instanceOf(PorterStemFilter.class)); - assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"}); + assertAnalyzesTo(analyzer, "consolingly", new String[] { "consolingli" }); } } @@ -90,13 +90,13 @@ public class StemmerTokenFilterFactoryTests extends OpenSearchTokenStreamTestCas Version v = VersionUtils.randomVersion(random()); Settings settings = Settings.builder() - .put("index.analysis.filter.my_porter2.type", "stemmer") - .put("index.analysis.filter.my_porter2.language", "porter2") - .put("index.analysis.analyzer.my_porter2.tokenizer","whitespace") - .put("index.analysis.analyzer.my_porter2.filter","my_porter2") - .put(SETTING_VERSION_CREATED,v) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.my_porter2.type", "stemmer") + .put("index.analysis.filter.my_porter2.language", "porter2") + .put("index.analysis.analyzer.my_porter2.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_porter2.filter", "my_porter2") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_porter2"); @@ -107,18 +107,23 @@ public class StemmerTokenFilterFactoryTests extends OpenSearchTokenStreamTestCas IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; NamedAnalyzer analyzer = indexAnalyzers.get("my_porter2"); assertThat(create, instanceOf(SnowballFilter.class)); - assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"}); + assertAnalyzesTo(analyzer, "possibly", new String[] { "possibl" }); } } public void testMultipleLanguagesThrowsException() throws IOException { Version v = VersionUtils.randomVersion(random()); - Settings settings = Settings.builder().put("index.analysis.filter.my_english.type", "stemmer") - .putList("index.analysis.filter.my_english.language", "english", "light_english").put(SETTING_VERSION_CREATED, v) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); + Settings settings = Settings.builder() + .put("index.analysis.filter.my_english.type", "stemmer") + .putList("index.analysis.filter.my_english.language", "english", "light_english") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN) + ); assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage()); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java index bb1ceb4663d..9df477c0f4a 100644 --- 
a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java @@ -80,10 +80,11 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { Files.copy(synonymsWordnet, config.resolve("synonyms_wordnet.txt")); String json = "/org/opensearch/analysis/common/synonyms.json"; - Settings settings = Settings.builder(). - loadFromStream(json, getClass().getResourceAsStream(json), false) - .put(Environment.PATH_HOME_SETTING.getKey(), home) - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); + Settings settings = Settings.builder() + .loadFromStream(json, getClass().getResourceAsStream(json), false) + .put(Environment.PATH_HOME_SETTING.getKey(), home) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; @@ -110,7 +111,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { .put("index.analysis.filter.stop_within_synonym.type", "stop") .putList("index.analysis.filter.stop_within_synonym.stopwords", "foobar", "opensearch") .put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace") - .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym") + .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym", "synonym") .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { @@ -131,7 +132,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { .put("index.analysis.filter.stop_within_synonym.type", "stop") .putList("index.analysis.filter.stop_within_synonym.stopwords", "foobar", "opensearch") .put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace") - .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand") + .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym", "synonym_expand") .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { @@ -159,9 +160,12 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; - BaseTokenStreamTestCase.assertAnalyzesTo(indexAnalyzers.get("synonymAnalyzer"), "Some developers are odd", - new String[]{ "some", "developers", "develop", "programm", "are", "odd" }, - new int[]{ 1, 1, 0, 0, 1, 1 }); + BaseTokenStreamTestCase.assertAnalyzesTo( + indexAnalyzers.get("synonymAnalyzer"), + "Some developers are odd", + new String[] { "some", "developers", "develop", "programm", "are", "odd" }, + new int[] { 1, 1, 0, 0, 1, 1 } + ); } public void testAsciiFoldingFilterForSynonyms() throws IOException { @@ -176,9 +180,12 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; - 
BaseTokenStreamTestCase.assertAnalyzesTo(indexAnalyzers.get("synonymAnalyzer"), "høj", - new String[]{ "hoj", "height" }, - new int[]{ 1, 0 }); + BaseTokenStreamTestCase.assertAnalyzesTo( + indexAnalyzers.get("synonymAnalyzer"), + "høj", + new String[] { "hoj", "height" }, + new int[] { 1, 0 } + ); } public void testPreconfigured() throws IOException { @@ -193,9 +200,12 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; - BaseTokenStreamTestCase.assertAnalyzesTo(indexAnalyzers.get("my_analyzer"), "würst", - new String[]{ "wurst", "sausage"}, - new int[]{ 1, 0 }); + BaseTokenStreamTestCase.assertAnalyzesTo( + indexAnalyzers.get("my_analyzer"), + "würst", + new String[] { "wurst", "sausage" }, + new int[] { 1, 0 } + ); } public void testChainedSynonymFilters() throws IOException { @@ -212,15 +222,21 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; - BaseTokenStreamTestCase.assertAnalyzesTo(indexAnalyzers.get("syn"), "term1", - new String[]{ "term1", "term3", "term2" }, new int[]{ 1, 0, 0 }); + BaseTokenStreamTestCase.assertAnalyzesTo( + indexAnalyzers.get("syn"), + "term1", + new String[] { "term1", "term3", "term2" }, + new int[] { 1, 0, 0 } + ); } public void testShingleFilters() { Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT) + ) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") .putList("index.analysis.filter.synonyms.synonyms", "programmer, developer") @@ -230,9 +246,10 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - expectThrows(IllegalArgumentException.class, () -> { - indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; - }); + expectThrows( + IllegalArgumentException.class, + () -> { indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; } + ); } @@ -246,9 +263,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - String[] bypassingFactories = new String[]{ - "dictionary_decompounder" - }; + String[] bypassingFactories = new String[] { "dictionary_decompounder" }; CommonAnalysisPlugin plugin = new CommonAnalysisPlugin(); for (String factory : bypassingFactories) { @@ -265,14 +280,25 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { } public void testPreconfiguredTokenFilters() throws IOException { - Set<String> disallowedFilters = new HashSet<>(Arrays.asList( - "common_grams", "edge_ngram", "edgeNGram", "keyword_repeat", "ngram", "nGram", - "shingle", "word_delimiter", "word_delimiter_graph" - )); + Set<String> disallowedFilters = new HashSet<>( + Arrays.asList( + "common_grams", + "edge_ngram", + "edgeNGram", + "keyword_repeat", + "ngram", + 
"nGram", + "shingle", + "word_delimiter", + "word_delimiter_graph" + ) + ); Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT) + ) .put("path.home", createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); @@ -281,23 +307,26 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { for (PreConfiguredTokenFilter tf : plugin.getPreConfiguredTokenFilters()) { if (disallowedFilters.contains(tf.getName())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - "Expected exception for factory " + tf.getName(), () -> { - tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter(); - }); - assertEquals(tf.getName(), "Token filter [" + tf.getName() - + "] cannot be used to parse synonyms", - e.getMessage()); - } - else { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + "Expected exception for factory " + tf.getName(), + () -> { tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter(); } + ); + assertEquals(tf.getName(), "Token filter [" + tf.getName() + "] cannot be used to parse synonyms", e.getMessage()); + } else { tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter(); } } Settings settings2 = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, + .put( + IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween( - random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0))) + random(), + LegacyESVersion.V_6_0_0, + VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0) + ) + ) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") .put("output_unigrams", "true") @@ -309,8 +338,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { if (disallowedFilters.contains(tf.getName())) { tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter(); expectedWarnings.add("Token filter [" + tf.getName() + "] will not be usable to parse synonyms after v7.0"); - } - else { + } else { tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter(); } } @@ -320,8 +348,10 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { public void testDisallowedTokenFilters() throws IOException { Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)) + .put( + IndexMetadata.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT) + ) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") .put("output_unigrams", "true") @@ -329,29 +359,39 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); CommonAnalysisPlugin plugin = new CommonAnalysisPlugin(); - String[] disallowedFactories = new String[]{ - "multiplexer", "cjk_bigram", "common_grams", "ngram", "edge_ngram", - "word_delimiter", "word_delimiter_graph", "fingerprint" - }; + String[] disallowedFactories = new String[] { + "multiplexer", + "cjk_bigram", + "common_grams", + "ngram", + "edge_ngram", + "word_delimiter", + 
"word_delimiter_graph", + "fingerprint" }; for (String factory : disallowedFactories) { TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings); TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, "Expected IllegalArgumentException for factory " + factory, - () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null)); + () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null) + ); - assertEquals(factory, "Token filter [" + factory - + "] cannot be used to parse synonyms", - e.getMessage()); + assertEquals(factory, "Token filter [" + factory + "] cannot be used to parse synonyms", e.getMessage()); } settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, + .put( + IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween( - random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0))) + random(), + LegacyESVersion.V_6_0_0, + VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0) + ) + ) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") .put("output_unigrams", "true") @@ -365,16 +405,20 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings); stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null); - expectedWarnings.add("Token filter [" + factory - + "] will not be usable to parse synonyms after v7.0"); + expectedWarnings.add("Token filter [" + factory + "] will not be usable to parse synonyms after v7.0"); } assertWarnings(expectedWarnings.toArray(new String[0])); settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, + .put( + IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween( - random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0))) + random(), + LegacyESVersion.V_6_0_0, + VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0) + ) + ) .put("path.home", createTempDir().toString()) .put("preserve_original", "false") .build(); @@ -383,11 +427,12 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase { TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null) + ); - assertEquals("Token filter [multiplexer] cannot be used to parse synonyms unless [preserve_original] is [true]", - e.getMessage()); + assertEquals("Token filter [multiplexer] cannot be used to parse synonyms unless [preserve_original] is [true]", e.getMessage()); } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java 
b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java index 34ef77aebb3..3ea9c526052 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java @@ -54,7 +54,7 @@ public class TrimTokenFilterTests extends OpenSearchTokenStreamTestCase { NamedAnalyzer normalizer = analysis.indexAnalyzers.getNormalizer("my_normalizer"); assertNotNull(normalizer); assertEquals("my_normalizer", normalizer.name()); - assertTokenStreamContents(normalizer.tokenStream("foo", " bar "), new String[] {"bar"}); + assertTokenStreamContents(normalizer.tokenStream("foo", " bar "), new String[] { "bar" }); assertEquals(new BytesRef("bar"), normalizer.normalize("foo", " bar ")); } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java index 98b7348296c..d98e358621d 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java @@ -54,8 +54,12 @@ public class WhitespaceTokenizerFactoryTests extends OpenSearchTestCase { public void testSimpleWhiteSpaceTokenizer() throws IOException { final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(new Index("test", "_na_"), indexSettings); - WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", - Settings.EMPTY).create(); + WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory( + indexProperties, + null, + "whitespace_maxlen", + Settings.EMPTY + ).create(); try (Reader reader = new StringReader("one, two, three")) { tokenizer.setReader(reader); @@ -67,8 +71,12 @@ public class WhitespaceTokenizerFactoryTests extends OpenSearchTestCase { final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(new Index("test", "_na_"), indexSettings); final Settings settings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 2).build(); - WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", - settings).create(); + WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory( + indexProperties, + null, + "whitespace_maxlen", + settings + ).create(); try (Reader reader = new StringReader("one, two, three")) { tokenizer.setReader(reader); assertTokenStreamContents(tokenizer, new String[] { "on", "e,", "tw", "o,", "th", "re", "e" }); @@ -76,7 +84,7 @@ public class WhitespaceTokenizerFactoryTests extends OpenSearchTestCase { final Settings defaultSettings = Settings.EMPTY; tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", defaultSettings) - .create(); + .create(); String veryLongToken = RandomStrings.randomAsciiAlphanumOfLength(random(), 256); try (Reader reader = new StringReader(veryLongToken)) { tokenizer.setReader(reader); @@ -84,13 +92,17 @@ 
public class WhitespaceTokenizerFactoryTests extends OpenSearchTestCase { } final Settings tooLongSettings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 1024 * 1024 + 1).build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", tooLongSettings).create()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", tooLongSettings).create() + ); assertEquals("maxTokenLen must be greater than 0 and less than 1048576 passed: 1048577", e.getMessage()); final Settings negativeSettings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, -1).build(); - e = expectThrows(IllegalArgumentException.class, - () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", negativeSettings).create()); + e = expectThrows( + IllegalArgumentException.class, + () -> new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen", negativeSettings).create() + ); assertEquals("maxTokenLen must be greater than 0 and less than 1048576 passed: -1", e.getMessage()); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java index 851f2bfebaa..6129971a69e 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java @@ -53,35 +53,56 @@ import java.io.IOException; import java.io.StringReader; import java.util.Collections; -public class WordDelimiterGraphTokenFilterFactoryTests - extends BaseWordDelimiterTokenFilterFactoryTestCase { +public class WordDelimiterGraphTokenFilterFactoryTests extends BaseWordDelimiterTokenFilterFactoryTestCase { public WordDelimiterGraphTokenFilterFactoryTests() { super("word_delimiter_graph"); } public void testMultiTerms() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( - Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.my_word_delimiter.type", type) - .put("index.analysis.filter.my_word_delimiter.catenate_all", "true") - .put("index.analysis.filter.my_word_delimiter.preserve_original", "true") - .build(), - new CommonAnalysisPlugin()); + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", type) + .put("index.analysis.filter.my_word_delimiter.catenate_all", "true") + .put("index.analysis.filter.my_word_delimiter.preserve_original", "true") + .build(), + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter"); String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's"; - String[] expected = new String[] { "PowerShot", "PowerShot", "Power", "Shot", "500-42", - "50042", "500", "42", "wi-fi", "wifi", "wi", "fi", "wi-fi-4000", "wifi4000", "wi", - "fi", "4000", "j2se", "j2se", "j", "2", "se", "O'Neil's", "ONeil", "O", "Neil" }; + String[] expected = new String[] { + "PowerShot", + "PowerShot", + "Power", + "Shot", + "500-42", + "50042", + "500", + 
"42", + "wi-fi", + "wifi", + "wi", + "fi", + "wi-fi-4000", + "wifi4000", + "wi", + "fi", + "4000", + "j2se", + "j2se", + "j", + "2", + "se", + "O'Neil's", + "ONeil", + "O", + "Neil" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); - int[] expectedIncr = new int[] { 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, - 1, 1, 1, 0, 0, 1 }; - int[] expectedPosLen = new int[] { 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 1, 3, 3, 1, 1, 1, 3, 3, - 1, 1, 1, 2, 2, 1, 1 }; - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, null, null, null, - expectedIncr, expectedPosLen, null); + int[] expectedIncr = new int[] { 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1 }; + int[] expectedPosLen = new int[] { 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 1, 3, 3, 1, 1, 1, 3, 3, 1, 1, 1, 2, 2, 1, 1 }; + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, null, null, null, expectedIncr, expectedPosLen, null); } /** @@ -89,24 +110,33 @@ public class WordDelimiterGraphTokenFilterFactoryTests */ public void testPartsAndCatenate() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( - Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.my_word_delimiter.type", type) - .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") - .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") - .build(), - new CommonAnalysisPlugin()); + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", type) + .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") + .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") + .build(), + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter"); String source = "PowerShot"; - int[] expectedIncr = new int[]{1, 0, 1}; - int[] expectedPosLen = new int[]{2, 1, 1}; - int[] expectedStartOffsets = new int[]{0, 0, 5}; - int[] expectedEndOffsets = new int[]{9, 5, 9}; - String[] expected = new String[]{"PowerShot", "Power", "Shot" }; + int[] expectedIncr = new int[] { 1, 0, 1 }; + int[] expectedPosLen = new int[] { 2, 1, 1 }; + int[] expectedStartOffsets = new int[] { 0, 0, 5 }; + int[] expectedEndOffsets = new int[] { 9, 5, 9 }; + String[] expected = new String[] { "PowerShot", "Power", "Shot" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, expectedStartOffsets, expectedEndOffsets, null, - expectedIncr, expectedPosLen, null); + assertTokenStreamContents( + tokenFilter.create(tokenizer), + expected, + expectedStartOffsets, + expectedEndOffsets, + null, + expectedIncr, + expectedPosLen, + null + ); } public void testAdjustingOffsets() throws IOException { @@ -118,82 +148,94 @@ public class WordDelimiterGraphTokenFilterFactoryTests .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") .put("index.analysis.filter.my_word_delimiter.adjust_offsets", "false") .build(), - new CommonAnalysisPlugin()); + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter"); String source = "PowerShot"; - int[] expectedIncr = new int[]{1, 0, 1}; - int[] expectedPosLen = new int[]{2, 1, 1}; 
- int[] expectedStartOffsets = new int[]{0, 0, 0}; - int[] expectedEndOffsets = new int[]{9, 9, 9}; - String[] expected = new String[]{"PowerShot", "Power", "Shot" }; + int[] expectedIncr = new int[] { 1, 0, 1 }; + int[] expectedPosLen = new int[] { 2, 1, 1 }; + int[] expectedStartOffsets = new int[] { 0, 0, 0 }; + int[] expectedEndOffsets = new int[] { 9, 9, 9 }; + String[] expected = new String[] { "PowerShot", "Power", "Shot" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, expectedStartOffsets, expectedEndOffsets, null, - expectedIncr, expectedPosLen, null); + assertTokenStreamContents( + tokenFilter.create(tokenizer), + expected, + expectedStartOffsets, + expectedEndOffsets, + null, + expectedIncr, + expectedPosLen, + null + ); } public void testIgnoreKeywords() throws IOException { - //test with keywords but ignore is false (default behavior) + // test with keywords but ignore is false (default behavior) Settings settings = Settings.builder() - .put("index.analysis.filter.my_word_delimiter.type", type) - .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") - .put("index.analysis.filter.my_keyword.type", "keyword_marker") - .put("index.analysis.filter.my_keyword.keywords", "PowerHungry") - .put("index.analysis.analyzer.my_analyzer.type", "custom") - .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace") - .put("index.analysis.analyzer.my_analyzer.filter", "my_keyword, my_word_delimiter") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + .put("index.analysis.filter.my_word_delimiter.type", type) + .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") + .put("index.analysis.filter.my_keyword.type", "keyword_marker") + .put("index.analysis.filter.my_keyword.keywords", "PowerHungry") + .put("index.analysis.analyzer.my_analyzer.type", "custom") + .put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_analyzer.filter", "my_keyword, my_word_delimiter") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); String source = "PowerShot PowerHungry"; - int[] expectedStartOffsets = new int[]{0, 5, 10, 15}; - int[] expectedEndOffsets = new int[]{5, 9, 15, 21}; - String[] expected = new String[]{"Power", "Shot", "Power", "Hungry"}; + int[] expectedStartOffsets = new int[] { 0, 5, 10, 15 }; + int[] expectedEndOffsets = new int[] { 5, 9, 15, 21 }; + String[] expected = new String[] { "Power", "Shot", "Power", "Hungry" }; NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_analyzer"); assertAnalyzesTo(analyzer, source, expected, expectedStartOffsets, expectedEndOffsets); - //test with keywords but ignore_keywords is set as true - settings = Settings.builder().put(settings) - .put("index.analysis.filter.my_word_delimiter.ignore_keywords", "true") - .build(); + // test with keywords but ignore_keywords is set as true + settings = Settings.builder().put(settings).put("index.analysis.filter.my_word_delimiter.ignore_keywords", "true").build(); analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); analyzer = analysis.indexAnalyzers.get("my_analyzer"); - expectedStartOffsets = new int[]{0, 5, 10}; - expectedEndOffsets = new int[]{5, 9, 
21}; - expected = new String[]{"Power", "Shot", "PowerHungry"}; + expectedStartOffsets = new int[] { 0, 5, 10 }; + expectedEndOffsets = new int[] { 5, 9, 21 }; + expected = new String[] { "Power", "Shot", "PowerHungry" }; assertAnalyzesTo(analyzer, source, expected, expectedStartOffsets, expectedEndOffsets); } public void testPreconfiguredFilter() throws IOException { // Before 7.3 we don't adjust offsets { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, + .put( + IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween( - random(), LegacyESVersion.V_7_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_3_0))) + random(), + LegacyESVersion.V_7_0_0, + VersionUtils.getPreviousVersion(LegacyESVersion.V_7_3_0) + ) + ) .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph") .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - try (IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings)) { + try ( + IndexAnalyzers indexAnalyzers = new AnalysisModule( + TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin()) + ).getAnalysisRegistry().build(idxSettings) + ) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "h100", new String[]{"h", "100"}, new int[]{ 0, 0 }, new int[]{ 4, 4 }); + assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 0 }, new int[] { 4, 4 }); } } // Afger 7.3 we do adjust offsets { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") @@ -201,12 +243,16 @@ public class WordDelimiterGraphTokenFilterFactoryTests .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - try (IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), - Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings)) { + try ( + IndexAnalyzers indexAnalyzers = new AnalysisModule( + TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin()) + ).getAnalysisRegistry().build(idxSettings) + ) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "h100", new String[]{"h", "100"}, new int[]{ 0, 1 }, new int[]{ 1, 4 }); + assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 1 }, new int[] { 1, 4 }); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java 
b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java index e5a5697e61a..ea37fd5ce95 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java @@ -31,7 +31,6 @@ package org.opensearch.analysis.common; - import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.opensearch.common.settings.Settings; @@ -43,8 +42,7 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; import java.io.StringReader; -public class WordDelimiterTokenFilterFactoryTests - extends BaseWordDelimiterTokenFilterFactoryTestCase { +public class WordDelimiterTokenFilterFactoryTests extends BaseWordDelimiterTokenFilterFactoryTestCase { public WordDelimiterTokenFilterFactoryTests() { super("word_delimiter"); } @@ -54,16 +52,17 @@ public class WordDelimiterTokenFilterFactoryTests */ public void testPartsAndCatenate() throws IOException { OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( - Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.my_word_delimiter.type", type) - .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") - .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") - .build(), - new CommonAnalysisPlugin()); + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", type) + .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") + .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") + .build(), + new CommonAnalysisPlugin() + ); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter"); String source = "PowerShot"; - String[] expected = new String[]{"Power", "PowerShot", "Shot" }; + String[] expected = new String[] { "Power", "PowerShot", "Shot" }; Tokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); diff --git a/modules/analysis-common/src/yamlRestTest/java/org/opensearch/analysis/common/CommonAnalysisClientYamlTestSuiteIT.java b/modules/analysis-common/src/yamlRestTest/java/org/opensearch/analysis/common/CommonAnalysisClientYamlTestSuiteIT.java index eea331fe19a..90bd7097171 100644 --- a/modules/analysis-common/src/yamlRestTest/java/org/opensearch/analysis/common/CommonAnalysisClientYamlTestSuiteIT.java +++ b/modules/analysis-common/src/yamlRestTest/java/org/opensearch/analysis/common/CommonAnalysisClientYamlTestSuiteIT.java @@ -38,7 +38,7 @@ import org.opensearch.test.rest.yaml.ClientYamlTestCandidate; import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase; public class CommonAnalysisClientYamlTestSuiteIT extends OpenSearchClientYamlSuiteTestCase { - public CommonAnalysisClientYamlTestSuiteIT(@Name("yaml")ClientYamlTestCandidate testCandidate) { + public CommonAnalysisClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java 
b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java index 06cc1c7797b..babf024da01 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/opensearch/ingest/common/IngestRestartIT.java @@ -79,9 +79,7 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { ctx.put("z", 0); return null; }); - pluginScripts.put("throwing_script", ctx -> { - throw new RuntimeException("this script always fails"); - }); + pluginScripts.put("throwing_script", ctx -> { throw new RuntimeException("this script always fails"); }); return pluginScripts; } } @@ -90,30 +88,44 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { internalCluster().ensureAtLeastNumDataNodes(1); internalCluster().startMasterOnlyNode(); final String pipelineId = "foo"; - client().admin().cluster().preparePutPipeline(pipelineId, - new BytesArray("{\n" + - " \"processors\" : [\n" + - " {\"set\" : {\"field\": \"any_field\", \"value\": \"any_value\"}},\n" + - " {\"set\" : {" + "" + - " \"if\" : " + "{\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": \"throwing_script\"}," + - " \"field\": \"any_field2\"," + - " \"value\": \"any_value2\"}" + - " }\n" + - " ]\n" + - "}"), XContentType.JSON).get(); + client().admin() + .cluster() + .preparePutPipeline( + pipelineId, + new BytesArray( + "{\n" + + " \"processors\" : [\n" + + " {\"set\" : {\"field\": \"any_field\", \"value\": \"any_value\"}},\n" + + " {\"set\" : {" + + "" + + " \"if\" : " + + "{\"lang\": \"" + + MockScriptEngine.NAME + + "\", \"source\": \"throwing_script\"}," + + " \"field\": \"any_field2\"," + + " \"value\": \"any_value2\"}" + + " }\n" + + " ]\n" + + "}" + ), + XContentType.JSON + ) + .get(); Exception e = expectThrows( Exception.class, - () -> - client().prepareIndex("index", "doc").setId("1") - .setSource("x", 0) - .setPipeline(pipelineId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get() + () -> client().prepareIndex("index", "doc") + .setId("1") + .setSource("x", 0) + .setPipeline(pipelineId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get() ); assertTrue(e.getMessage().contains("this script always fails")); - NodesStatsResponse r = client().admin().cluster().prepareNodesStats(internalCluster().getNodeNames()) + NodesStatsResponse r = client().admin() + .cluster() + .prepareNodesStats(internalCluster().getNodeNames()) .addMetric(INGEST.metricName()) .get(); int nodeCount = r.getNodes().size(); @@ -130,19 +142,23 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { String pipelineIdWithScript = pipelineIdWithoutScript + "_script"; internalCluster().startNode(); - BytesReference pipelineWithScript = new BytesArray("{\n" + - " \"processors\" : [\n" + - " {\"script\" : {\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": \"my_script\"}}\n" + - " ]\n" + - "}"); - BytesReference pipelineWithoutScript = new BytesArray("{\n" + - " \"processors\" : [\n" + - " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + - " ]\n" + - "}"); + BytesReference pipelineWithScript = new BytesArray( + "{\n" + + " \"processors\" : [\n" + + " {\"script\" : {\"lang\": \"" + + MockScriptEngine.NAME + + "\", \"source\": \"my_script\"}}\n" + + " ]\n" + + "}" + ); + BytesReference pipelineWithoutScript = new BytesArray( + "{\n" + " \"processors\" : [\n" + " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + " ]\n" + "}" + ); - Consumer checkPipelineExists = (id) -> 
assertThat(client().admin().cluster().prepareGetPipeline(id) - .get().pipelines().get(0).getId(), equalTo(id)); + Consumer checkPipelineExists = (id) -> assertThat( + client().admin().cluster().prepareGetPipeline(id).get().pipelines().get(0).getId(), + equalTo(id) + ); client().admin().cluster().preparePutPipeline(pipelineIdWithScript, pipelineWithScript, XContentType.JSON).get(); client().admin().cluster().preparePutPipeline(pipelineIdWithoutScript, pipelineWithoutScript, XContentType.JSON).get(); @@ -150,7 +166,6 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { checkPipelineExists.accept(pipelineIdWithScript); checkPipelineExists.accept(pipelineIdWithoutScript); - internalCluster().restartNode(internalCluster().getMasterName(), new InternalTestCluster.RestartCallback() { @Override @@ -169,20 +184,30 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - IllegalStateException exception = expectThrows(IllegalStateException.class, + IllegalStateException exception = expectThrows( + IllegalStateException.class, () -> client().prepareIndex("index", "doc", "2") .setSource("x", 0) .setPipeline(pipelineIdWithScript) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get()); - assertThat(exception.getMessage(), - equalTo("pipeline with id [" + pipelineIdWithScript + "] could not be loaded, caused by " + - "[OpenSearchParseException[Error updating pipeline with id [" + pipelineIdWithScript + "]]; " + - "nested: OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + - "nested: IllegalArgumentException[cannot execute [inline] scripts];; " + - "OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + - "nested: IllegalArgumentException[cannot execute [inline] scripts];; java.lang.IllegalArgumentException: " + - "cannot execute [inline] scripts]")); + .get() + ); + assertThat( + exception.getMessage(), + equalTo( + "pipeline with id [" + + pipelineIdWithScript + + "] could not be loaded, caused by " + + "[OpenSearchParseException[Error updating pipeline with id [" + + pipelineIdWithScript + + "]]; " + + "nested: OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + + "nested: IllegalArgumentException[cannot execute [inline] scripts];; " + + "OpenSearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + + "nested: IllegalArgumentException[cannot execute [inline] scripts];; java.lang.IllegalArgumentException: " + + "cannot execute [inline] scripts]" + ) + ); Map source = client().prepareGet("index", "doc", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); @@ -192,24 +217,30 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exception { internalCluster().startNode(); - client().admin().cluster().preparePutStoredScript() - .setId("1") - .setContent(new BytesArray("{\"script\": {\"lang\": \"" + MockScriptEngine.NAME + - "\", \"source\": \"my_script\"} }"), XContentType.JSON) - .get(); - BytesReference pipeline = new BytesArray("{\n" + - " \"processors\" : [\n" + - " {\"set\" : {\"field\": \"y\", \"value\": 0}},\n" + - " {\"script\" : {\"id\": \"1\"}}\n" + - " ]\n" + - "}"); + client().admin() + .cluster() + .preparePutStoredScript() + .setId("1") + .setContent( + new BytesArray("{\"script\": {\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": 
\"my_script\"} }"), + XContentType.JSON + ) + .get(); + BytesReference pipeline = new BytesArray( + "{\n" + + " \"processors\" : [\n" + + " {\"set\" : {\"field\": \"y\", \"value\": 0}},\n" + + " {\"script\" : {\"id\": \"1\"}}\n" + + " ]\n" + + "}" + ); client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); client().prepareIndex("index", "doc", "1") - .setSource("x", 0) - .setPipeline("_id") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + .setSource("x", 0) + .setPipeline("_id") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); Map source = client().prepareGet("index", "doc", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); @@ -224,10 +255,10 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { ensureYellow("index"); client().prepareIndex("index", "doc", "2") - .setSource("x", 0) - .setPipeline("_id") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + .setSource("x", 0) + .setPipeline("_id") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); source = client().prepareGet("index", "doc", "2").get().getSource(); assertThat(source.get("x"), equalTo(0)); @@ -237,23 +268,18 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { public void testWithDedicatedIngestNode() throws Exception { String node = internalCluster().startNode(); - String ingestNode = internalCluster().startNode(Settings.builder() - .put("node.master", false) - .put("node.data", false) - ); + String ingestNode = internalCluster().startNode(Settings.builder().put("node.master", false).put("node.data", false)); - BytesReference pipeline = new BytesArray("{\n" + - " \"processors\" : [\n" + - " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + - " ]\n" + - "}"); + BytesReference pipeline = new BytesArray( + "{\n" + " \"processors\" : [\n" + " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + " ]\n" + "}" + ); client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); client().prepareIndex("index", "doc", "1") - .setSource("x", 0) - .setPipeline("_id") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + .setSource("x", 0) + .setPipeline("_id") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); Map source = client().prepareGet("index", "doc", "1").get().getSource(); assertThat(source.get("x"), equalTo(0)); @@ -263,10 +289,10 @@ public class IngestRestartIT extends OpenSearchIntegTestCase { internalCluster().restartNode(node, new InternalTestCluster.RestartCallback()); client(ingestNode).prepareIndex("index", "doc", "2") - .setSource("x", 0) - .setPipeline("_id") - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); + .setSource("x", 0) + .setPipeline("_id") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); source = client(ingestNode).prepareGet("index", "doc", "2").get().getSource(); assertThat(source.get("x"), equalTo(0)); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AbstractStringProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AbstractStringProcessor.java index b04d8801c91..48bab9c9edc 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AbstractStringProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AbstractStringProcessor.java @@ -89,8 +89,17 @@ abstract class AbstractStringProcessor extends AbstractProcessor { if (value instanceof String) { 
newList.add(process((String) value)); } else { - throw new IllegalArgumentException("value [" + value + "] of type [" + value.getClass().getName() + - "] in list field [" + field + "] cannot be cast to [" + String.class.getName() + "]"); + throw new IllegalArgumentException( + "value [" + + value + + "] of type [" + + value.getClass().getName() + + "] in list field [" + + field + + "] cannot be cast to [" + + String.class.getName() + + "]" + ); } } newValue = newList; @@ -98,8 +107,9 @@ abstract class AbstractStringProcessor extends AbstractProcessor { if (val instanceof String) { newValue = process((String) val); } else { - throw new IllegalArgumentException("field [" + field + "] of type [" + val.getClass().getName() + "] cannot be cast to [" + - String.class.getName() + "]"); + throw new IllegalArgumentException( + "field [" + field + "] of type [" + val.getClass().getName() + "] cannot be cast to [" + String.class.getName() + "]" + ); } } @@ -118,8 +128,12 @@ abstract class AbstractStringProcessor extends AbstractProcessor { } @Override - public AbstractStringProcessor create(Map registry, String tag, - String description, Map config) throws Exception { + public AbstractStringProcessor create( + Map registry, + String tag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(processorType, tag, config, "field"); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(processorType, tag, config, "ignore_missing", false); String targetField = ConfigurationUtils.readStringProperty(processorType, tag, config, "target_field", field); @@ -127,8 +141,13 @@ abstract class AbstractStringProcessor extends AbstractProcessor { return newProcessor(tag, description, config, field, ignoreMissing, targetField); } - protected abstract AbstractStringProcessor newProcessor(String processorTag, String description, - Map config, String field, - boolean ignoreMissing, String targetField); + protected abstract AbstractStringProcessor newProcessor( + String processorTag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AppendProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AppendProcessor.java index a9db05ee612..392746f09bd 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AppendProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/AppendProcessor.java @@ -90,15 +90,23 @@ public final class AppendProcessor extends AbstractProcessor { } @Override - public AppendProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public AppendProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); boolean allowDuplicates = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "allow_duplicates", true); - TemplateScript.Factory compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "field", field, scriptService); - return new AppendProcessor(processorTag, description, compiledTemplate, ValueSource.wrap(value, scriptService), - allowDuplicates); + TemplateScript.Factory compiledTemplate = 
ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", field, scriptService); + return new AppendProcessor( + processorTag, + description, + compiledTemplate, + ValueSource.wrap(value, scriptService), + allowDuplicates + ); } } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java index abcde8aa47d..3bd11379758 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java @@ -69,8 +69,14 @@ public final class BytesProcessor extends AbstractStringProcessor { } @Override - protected BytesProcessor newProcessor(String tag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected BytesProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new BytesProcessor(tag, description, field, ignoreMissing, targetField); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ConvertProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ConvertProcessor.java index b038455fd49..2a81fa5f498 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ConvertProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ConvertProcessor.java @@ -60,12 +60,13 @@ public final class ConvertProcessor extends AbstractProcessor { return Integer.decode(strValue); } return Integer.parseInt(strValue); - } catch(NumberFormatException e) { + } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to integer", e); } } - }, LONG { + }, + LONG { @Override public Object convert(Object value) { try { @@ -74,29 +75,32 @@ public final class ConvertProcessor extends AbstractProcessor { return Long.decode(strValue); } return Long.parseLong(strValue); - } catch(NumberFormatException e) { + } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to long", e); } } - }, DOUBLE { + }, + DOUBLE { @Override public Object convert(Object value) { try { return Double.parseDouble(value.toString()); - } catch(NumberFormatException e) { + } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to double", e); } } - }, FLOAT { + }, + FLOAT { @Override public Object convert(Object value) { try { return Float.parseFloat(value.toString()); - } catch(NumberFormatException e) { + } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to float", e); } } - }, BOOLEAN { + }, + BOOLEAN { @Override public Object convert(Object value) { if (value.toString().equalsIgnoreCase("true")) { @@ -107,16 +111,18 @@ public final class ConvertProcessor extends AbstractProcessor { throw new IllegalArgumentException("[" + value + "] is not a boolean value, cannot convert to boolean"); } } - }, STRING { + }, + STRING { @Override public Object convert(Object value) { return value.toString(); } - }, AUTO { + }, + AUTO { @Override public Object convert(Object value) { if (!(value instanceof String)) { - return value; + return value; } try { return BOOLEAN.convert(value); @@ -147,9 +153,13 @@ public final class ConvertProcessor extends AbstractProcessor { public static Type 
fromString(String processorTag, String propertyName, String type) { try { return Type.valueOf(type.toUpperCase(Locale.ROOT)); - } catch(IllegalArgumentException e) { - throw newConfigurationException(TYPE, processorTag, propertyName, "type [" + type + - "] not supported, cannot convert field."); + } catch (IllegalArgumentException e) { + throw newConfigurationException( + TYPE, + processorTag, + propertyName, + "type [" + type + "] not supported, cannot convert field." + ); } } } @@ -217,8 +227,12 @@ public final class ConvertProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public ConvertProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public ConvertProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String typeProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "type"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", field); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvParser.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvParser.java index 726de87a225..66bc01c32e1 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvParser.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvParser.java @@ -42,7 +42,10 @@ final class CsvParser { private static final char TAB = '\t'; private enum State { - START, UNQUOTED, QUOTED, QUOTED_END + START, + UNQUOTED, + QUOTED, + QUOTED_END } private final char quote; @@ -94,7 +97,7 @@ final class CsvParser { } } - //we've reached end of string, we need to handle last field + // we've reached end of string, we need to handle last field switch (state) { case UNQUOTED: setField(length); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvProcessor.java index 4cc8b718667..6c487f2dee4 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CsvProcessor.java @@ -59,7 +59,7 @@ public final class CsvProcessor extends AbstractProcessor { public static final String TYPE = "csv"; - //visible for testing + // visible for testing final String field; final String[] headers; final boolean trim; @@ -68,8 +68,17 @@ public final class CsvProcessor extends AbstractProcessor { final boolean ignoreMissing; final Object emptyValue; - CsvProcessor(String tag, String description, String field, String[] headers, boolean trim, char separator, char quote, - boolean ignoreMissing, Object emptyValue) { + CsvProcessor( + String tag, + String description, + String field, + String[] headers, + boolean trim, + char separator, + char quote, + boolean ignoreMissing, + Object emptyValue + ) { super(tag, description); this.field = field; this.headers = headers; @@ -103,8 +112,12 @@ public final class CsvProcessor extends AbstractProcessor { public static final class Factory implements org.opensearch.ingest.Processor.Factory { @Override - public CsvProcessor create(Map registry, String processorTag, - String description, Map config) { + public CsvProcessor create( + Map registry, + String processorTag, + String 
description, + Map config + ) { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String quote = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "quote", "\""); if (quote.length() != 1) { @@ -116,7 +129,7 @@ public final class CsvProcessor extends AbstractProcessor { } boolean trim = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trim", false); Object emptyValue = null; - if(config.containsKey("empty_value")){ + if (config.containsKey("empty_value")) { emptyValue = ConfigurationUtils.readObject(TYPE, processorTag, config, "empty_value"); } boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); @@ -124,8 +137,17 @@ public final class CsvProcessor extends AbstractProcessor { if (targetFields.isEmpty()) { throw newConfigurationException(TYPE, processorTag, "target_fields", "target fields list can't be empty"); } - return new CsvProcessor(processorTag, description, field, targetFields.toArray(new String[0]), trim, separator.charAt(0), - quote.charAt(0), ignoreMissing, emptyValue); + return new CsvProcessor( + processorTag, + description, + field, + targetFields.toArray(new String[0]), + trim, + separator.charAt(0), + quote.charAt(0), + ignoreMissing, + emptyValue + ); } } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateFormat.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateFormat.java index 77bf0a6d54b..7af26a8cc4c 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateFormat.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateFormat.java @@ -61,9 +61,8 @@ enum DateFormat { Function getFunction(String format, ZoneId timezone, Locale locale) { return (date) -> { TemporalAccessor accessor = DateFormatter.forPattern("iso8601").parse(date); - //even though locale could be set to en-us, Locale.ROOT (following iso8601 calendar data rules) should be used - return DateFormatters.from(accessor, Locale.ROOT, timezone) - .withZoneSameInstant(timezone); + // even though locale could be set to en-us, Locale.ROOT (following iso8601 calendar data rules) should be used + return DateFormatters.from(accessor, Locale.ROOT, timezone).withZoneSameInstant(timezone); }; } @@ -93,19 +92,24 @@ enum DateFormat { long base = Long.parseLong(date.substring(1, 16), 16); // 1356138046000 long rest = Long.parseLong(date.substring(16, 24), 16); - return ((base * 1000) - 10000) + (rest/1000000); + return ((base * 1000) - 10000) + (rest / 1000000); } }, Java { - private final List FIELDS = - Arrays.asList(NANO_OF_SECOND, SECOND_OF_DAY, MINUTE_OF_DAY, HOUR_OF_DAY, DAY_OF_MONTH, MONTH_OF_YEAR); + private final List FIELDS = Arrays.asList( + NANO_OF_SECOND, + SECOND_OF_DAY, + MINUTE_OF_DAY, + HOUR_OF_DAY, + DAY_OF_MONTH, + MONTH_OF_YEAR + ); @Override Function getFunction(String format, ZoneId zoneId, Locale locale) { boolean isUtc = ZoneOffset.UTC.equals(zoneId); - DateFormatter dateFormatter = DateFormatter.forPattern(format) - .withLocale(locale); + DateFormatter dateFormatter = DateFormatter.forPattern(format).withLocale(locale); // if UTC zone is set here, the time zone specified in the format will be ignored, leading to wrong dates if (isUtc == false) { dateFormatter = dateFormatter.withZone(zoneId); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateIndexNameProcessor.java 
b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateIndexNameProcessor.java index a801a02b28e..e925db879a4 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateIndexNameProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateIndexNameProcessor.java @@ -63,9 +63,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor { private final ZoneId timezone; private final List> dateFormats; - DateIndexNameProcessor(String tag, String description, String field, List> dateFormats, - ZoneId timezone, TemplateScript.Factory indexNamePrefixTemplate, TemplateScript.Factory dateRoundingTemplate, - TemplateScript.Factory indexNameFormatTemplate) { + DateIndexNameProcessor( + String tag, + String description, + String field, + List> dateFormats, + ZoneId timezone, + TemplateScript.Factory indexNamePrefixTemplate, + TemplateScript.Factory dateRoundingTemplate, + TemplateScript.Factory indexNameFormatTemplate + ) { super(tag, description); this.field = field; this.timezone = timezone; @@ -91,7 +98,7 @@ public final class DateIndexNameProcessor extends AbstractProcessor { try { dateTime = dateParser.apply(date); } catch (Exception e) { - //try the next parser and keep track of the exceptions + // try the next parser and keep track of the exceptions lastException = ExceptionsHelper.useOrSuppress(lastException, e); } } @@ -106,15 +113,20 @@ public final class DateIndexNameProcessor extends AbstractProcessor { DateFormatter formatter = DateFormatter.forPattern(indexNameFormat); // use UTC instead of Z is string representation of UTC, so behaviour is the same between 6.x and 7 String zone = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId(); - StringBuilder builder = new StringBuilder() - .append('<') - .append(indexNamePrefix) - .append('{') - .append(formatter.format(dateTime)).append("||/").append(dateRounding) - .append('{').append(indexNameFormat).append('|').append(zone).append('}') - .append('}') - .append('>'); - String dynamicIndexName = builder.toString(); + StringBuilder builder = new StringBuilder().append('<') + .append(indexNamePrefix) + .append('{') + .append(formatter.format(dateTime)) + .append("||/") + .append(dateRounding) + .append('{') + .append(indexNameFormat) + .append('|') + .append(zone) + .append('}') + .append('}') + .append('>'); + String dynamicIndexName = builder.toString(); ingestDocument.setFieldValue(IngestDocument.Metadata.INDEX.getFieldName(), dynamicIndexName); return ingestDocument; } @@ -157,8 +169,12 @@ public final class DateIndexNameProcessor extends AbstractProcessor { } @Override - public DateIndexNameProcessor create(Map registry, String tag, - String description, Map config) throws Exception { + public DateIndexNameProcessor create( + Map registry, + String tag, + String description, + Map config + ) throws Exception { String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "locale"); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "timezone"); ZoneId timezone = timezoneString == null ? 
ZoneOffset.UTC : ZoneId.of(timezoneString); @@ -182,16 +198,39 @@ public final class DateIndexNameProcessor extends AbstractProcessor { String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field"); String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", ""); - TemplateScript.Factory indexNamePrefixTemplate = - ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_prefix", indexNamePrefix, scriptService); + TemplateScript.Factory indexNamePrefixTemplate = ConfigurationUtils.compileTemplate( + TYPE, + tag, + "index_name_prefix", + indexNamePrefix, + scriptService + ); String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding"); - TemplateScript.Factory dateRoundingTemplate = - ConfigurationUtils.compileTemplate(TYPE, tag, "date_rounding", dateRounding, scriptService); + TemplateScript.Factory dateRoundingTemplate = ConfigurationUtils.compileTemplate( + TYPE, + tag, + "date_rounding", + dateRounding, + scriptService + ); String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd"); - TemplateScript.Factory indexNameFormatTemplate = - ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_format", indexNameFormat, scriptService); - return new DateIndexNameProcessor(tag, description, field, dateFormats, timezone, indexNamePrefixTemplate, - dateRoundingTemplate, indexNameFormatTemplate); + TemplateScript.Factory indexNameFormatTemplate = ConfigurationUtils.compileTemplate( + TYPE, + tag, + "index_name_format", + indexNameFormat, + scriptService + ); + return new DateIndexNameProcessor( + tag, + description, + field, + dateFormats, + timezone, + indexNamePrefixTemplate, + dateRoundingTemplate, + indexNameFormatTemplate + ); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateProcessor.java index c29877e596d..4b90ace3563 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DateProcessor.java @@ -67,13 +67,28 @@ public final class DateProcessor extends AbstractProcessor { private final List, Function>> dateParsers; private final String outputFormat; - DateProcessor(String tag, String description, @Nullable TemplateScript.Factory timezone, @Nullable TemplateScript.Factory locale, - String field, List formats, String targetField) { + DateProcessor( + String tag, + String description, + @Nullable TemplateScript.Factory timezone, + @Nullable TemplateScript.Factory locale, + String field, + List formats, + String targetField + ) { this(tag, description, timezone, locale, field, formats, targetField, DEFAULT_OUTPUT_FORMAT); } - DateProcessor(String tag, String description, @Nullable TemplateScript.Factory timezone, @Nullable TemplateScript.Factory locale, - String field, List formats, String targetField, String outputFormat) { + DateProcessor( + String tag, + String description, + @Nullable TemplateScript.Factory timezone, + @Nullable TemplateScript.Factory locale, + String field, + List formats, + String targetField, + String outputFormat + ) { super(tag, description); this.timezone = timezone; this.locale = locale; @@ -112,7 +127,7 @@ public final class DateProcessor extends AbstractProcessor { try { dateTime = dateParser.apply(ingestDocument.getSourceAndMetadata()).apply(value); } catch (Exception e) { - 
//try the next parser and keep track of the exceptions + // try the next parser and keep track of the exceptions lastException = ExceptionsHelper.useOrSuppress(lastException, e); } } @@ -162,33 +177,48 @@ public final class DateProcessor extends AbstractProcessor { this.scriptService = scriptService; } - public DateProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public DateProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone"); TemplateScript.Factory compiledTimezoneTemplate = null; if (timezoneString != null) { - compiledTimezoneTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "timezone", timezoneString, scriptService); + compiledTimezoneTemplate = ConfigurationUtils.compileTemplate( + TYPE, + processorTag, + "timezone", + timezoneString, + scriptService + ); } String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "locale"); TemplateScript.Factory compiledLocaleTemplate = null; if (localeString != null) { - compiledLocaleTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "locale", localeString, scriptService); + compiledLocaleTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, "locale", localeString, scriptService); } List formats = ConfigurationUtils.readList(TYPE, processorTag, config, "formats"); - String outputFormat = - ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "output_format", DEFAULT_OUTPUT_FORMAT); + String outputFormat = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "output_format", DEFAULT_OUTPUT_FORMAT); try { DateFormatter.forPattern(outputFormat); } catch (Exception e) { throw new IllegalArgumentException("invalid output format [" + outputFormat + "]", e); } - return new DateProcessor(processorTag, description, compiledTimezoneTemplate, compiledLocaleTemplate, field, formats, - targetField, outputFormat); + return new DateProcessor( + processorTag, + description, + compiledTimezoneTemplate, + compiledLocaleTemplate, + field, + formats, + targetField, + outputFormat + ); } } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DissectProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DissectProcessor.java index 5a46855e6b9..408982a74fc 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DissectProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DissectProcessor.java @@ -43,7 +43,7 @@ import java.util.Map; public final class DissectProcessor extends AbstractProcessor { public static final String TYPE = "dissect"; - //package private members for testing + // package private members for testing final String field; final boolean ignoreMissing; final String pattern; @@ -79,8 +79,12 @@ public final class DissectProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public DissectProcessor create(Map registry, String processorTag, String description, - Map config) { + public DissectProcessor create( + Map registry, + String processorTag, 
+ String description, + Map config + ) { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String pattern = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "pattern"); String appendSeparator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "append_separator", ""); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java index a79a64ed746..39c2d67ac0b 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/DotExpanderProcessor.java @@ -79,8 +79,9 @@ public final class DotExpanderProcessor extends AbstractProcessor { if (ingestDocument.hasField(partialPath)) { Object val = ingestDocument.getFieldValue(partialPath, Object.class); if ((val instanceof Map) == false) { - throw new IllegalArgumentException("cannot expend [" + path + "], because [" + partialPath + - "] is not an object field, but a value field"); + throw new IllegalArgumentException( + "cannot expend [" + path + "], because [" + partialPath + "] is not an object field, but a value field" + ); } } else { break; @@ -109,22 +110,33 @@ public final class DotExpanderProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public Processor create(Map processorFactories, String tag, String description, - Map config) throws Exception { + public Processor create( + Map processorFactories, + String tag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field"); if (field.contains(".") == false) { - throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field", - "field does not contain a dot"); + throw ConfigurationUtils.newConfigurationException( + ConfigurationUtils.TAG_KEY, + tag, + "field", + "field does not contain a dot" + ); } if (field.indexOf('.') == 0 || field.lastIndexOf('.') == field.length() - 1) { - throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field", - "Field can't start or end with a dot"); + throw ConfigurationUtils.newConfigurationException( + ConfigurationUtils.TAG_KEY, + tag, + "field", + "Field can't start or end with a dot" + ); } int firstIndex = -1; for (int index = field.indexOf('.'); index != -1; index = field.indexOf('.', index + 1)) { if (index - firstIndex == 1) { - throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field", - "No space between dots"); + throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field", "No space between dots"); } firstIndex = index; } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessor.java index 033981bafcf..2f2e4f719dc 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessor.java @@ -79,13 +79,21 @@ public final class FailProcessor extends AbstractProcessor { } @Override - public FailProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public FailProcessor create( + Map registry, + 
String processorTag, + String description, + Map config + ) throws Exception { String message = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "message"); - TemplateScript.Factory compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "message", message, scriptService); + TemplateScript.Factory compiledTemplate = ConfigurationUtils.compileTemplate( + TYPE, + processorTag, + "message", + message, + scriptService + ); return new FailProcessor(processorTag, description, compiledTemplate); } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java index a02ab8d8699..37320c0e900 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FailProcessorException.java @@ -48,4 +48,3 @@ public class FailProcessorException extends RuntimeException { super(message); } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java index 53b04f2829d..741a4fb29cf 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ForEachProcessor.java @@ -92,8 +92,13 @@ public final class ForEachProcessor extends AbstractProcessor implements Wrappin } } - void innerExecute(int index, List values, List newValues, IngestDocument document, - BiConsumer handler) { + void innerExecute( + int index, + List values, + List newValues, + IngestDocument document, + BiConsumer handler + ) { for (; index < values.size(); index++) { AtomicBoolean shouldContinueHere = new AtomicBoolean(); Object value = values.get(index); @@ -146,8 +151,8 @@ public final class ForEachProcessor extends AbstractProcessor implements Wrappin } @Override - public ForEachProcessor create(Map factories, String tag, - String description, Map config) throws Exception { + public ForEachProcessor create(Map factories, String tag, String description, Map config) + throws Exception { String field = readStringProperty(TYPE, tag, config, "field"); boolean ignoreMissing = readBooleanProperty(TYPE, tag, config, "ignore_missing", false); Map> processorConfig = readMap(TYPE, tag, config, "processor"); @@ -156,8 +161,7 @@ public final class ForEachProcessor extends AbstractProcessor implements Wrappin throw newConfigurationException(TYPE, tag, "processor", "Must specify exactly one processor type"); } Map.Entry> entry = entries.iterator().next(); - Processor processor = - ConfigurationUtils.readProcessor(factories, scriptService, entry.getKey(), entry.getValue()); + Processor processor = ConfigurationUtils.readProcessor(factories, scriptService, entry.getKey(), entry.getValue()); return new ForEachProcessor(tag, description, field, processor, ignoreMissing); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessor.java index 5d56b1da06f..a2fe199c24d 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessor.java @@ -59,8 +59,16 @@ public final class GrokProcessor extends 
AbstractProcessor { private final boolean traceMatch; private final boolean ignoreMissing; - GrokProcessor(String tag, String description, Map patternBank, List matchPatterns, String matchField, - boolean traceMatch, boolean ignoreMissing, MatcherWatchdog matcherWatchdog) { + GrokProcessor( + String tag, + String description, + Map patternBank, + List matchPatterns, + String matchField, + boolean traceMatch, + boolean ignoreMissing, + MatcherWatchdog matcherWatchdog + ) { super(tag, description); this.matchField = matchField; this.matchPatterns = matchPatterns; @@ -93,9 +101,7 @@ public final class GrokProcessor extends AbstractProcessor { if (matchPatterns.size() > 1) { @SuppressWarnings("unchecked") HashMap matchMap = (HashMap) ingestDocument.getFieldValue(PATTERN_MATCH_KEY, Object.class); - matchMap.keySet().stream().findFirst().ifPresent((index) -> { - ingestDocument.setFieldValue(PATTERN_MATCH_KEY, index); - }); + matchMap.keySet().stream().findFirst().ifPresent((index) -> { ingestDocument.setFieldValue(PATTERN_MATCH_KEY, index); }); } else { ingestDocument.setFieldValue(PATTERN_MATCH_KEY, "0"); } @@ -142,7 +148,7 @@ public final class GrokProcessor extends AbstractProcessor { combinedPattern = combinedPattern + "|" + valueWrap; } } - } else { + } else { combinedPattern = patterns.get(0); } @@ -160,8 +166,12 @@ public final class GrokProcessor extends AbstractProcessor { } @Override - public GrokProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public GrokProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); List matchPatterns = ConfigurationUtils.readList(TYPE, processorTag, config, "patterns"); boolean traceMatch = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trace_match", false); @@ -177,11 +187,23 @@ public final class GrokProcessor extends AbstractProcessor { } try { - return new GrokProcessor(processorTag, description, patternBank, matchPatterns, matchField, traceMatch, ignoreMissing, - matcherWatchdog); + return new GrokProcessor( + processorTag, + description, + patternBank, + matchPatterns, + matchField, + traceMatch, + ignoreMissing, + matcherWatchdog + ); } catch (Exception e) { - throw newConfigurationException(TYPE, processorTag, "patterns", - "Invalid regex pattern found in: " + matchPatterns + ". " + e.getMessage()); + throw newConfigurationException( + TYPE, + processorTag, + "patterns", + "Invalid regex pattern found in: " + matchPatterns + ". 
" + e.getMessage() + ); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GsubProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GsubProcessor.java index 670e3f918f3..df620afe119 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GsubProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GsubProcessor.java @@ -49,8 +49,15 @@ public final class GsubProcessor extends AbstractStringProcessor { private final Pattern pattern; private final String replacement; - GsubProcessor(String tag, String description, String field, Pattern pattern, String replacement, boolean ignoreMissing, - String targetField) { + GsubProcessor( + String tag, + String description, + String field, + Pattern pattern, + String replacement, + boolean ignoreMissing, + String targetField + ) { super(tag, description, ignoreMissing, targetField, field); this.pattern = pattern; this.replacement = replacement; @@ -81,8 +88,14 @@ public final class GsubProcessor extends AbstractStringProcessor { } @Override - protected GsubProcessor newProcessor(String processorTag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected GsubProcessor newProcessor( + String processorTag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { String pattern = readStringProperty(TYPE, processorTag, config, "pattern"); String replacement = readStringProperty(TYPE, processorTag, config, "replacement"); Pattern searchPattern; diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/HtmlStripProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/HtmlStripProcessor.java index 2faaf6e6e99..836e56fc900 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/HtmlStripProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/HtmlStripProcessor.java @@ -79,8 +79,14 @@ public final class HtmlStripProcessor extends AbstractStringProcessor { } @Override - protected HtmlStripProcessor newProcessor(String tag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected HtmlStripProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new HtmlStripProcessor(tag, description, field, ignoreMissing, targetField); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonPlugin.java index 1ffb0fbb21d..969f77aa851 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonPlugin.java @@ -62,13 +62,18 @@ import java.util.function.Supplier; public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPlugin { - static final Setting WATCHDOG_INTERVAL = - Setting.timeSetting("ingest.grok.watchdog.interval", TimeValue.timeValueSeconds(1), Setting.Property.NodeScope); - static final Setting WATCHDOG_MAX_EXECUTION_TIME = - Setting.timeSetting("ingest.grok.watchdog.max_execution_time", TimeValue.timeValueSeconds(1), Setting.Property.NodeScope); + static final Setting WATCHDOG_INTERVAL = Setting.timeSetting( + "ingest.grok.watchdog.interval", + 
TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope + ); + static final Setting WATCHDOG_MAX_EXECUTION_TIME = Setting.timeSetting( + "ingest.grok.watchdog.max_execution_time", + TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope + ); - public IngestCommonPlugin() { - } + public IngestCommonPlugin() {} @Override public Map getProcessors(Processor.Parameters parameters) { @@ -110,10 +115,15 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl } @Override - public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { return Collections.singletonList(new GrokProcessorGetAction.RestAction()); } @@ -125,8 +135,12 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl private static MatcherWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) { long intervalMillis = WATCHDOG_INTERVAL.get(parameters.env.settings()).getMillis(); long maxExecutionTimeMillis = WATCHDOG_MAX_EXECUTION_TIME.get(parameters.env.settings()).getMillis(); - return MatcherWatchdog.newInstance(intervalMillis, maxExecutionTimeMillis, - parameters.relativeTimeSupplier, parameters.scheduler::apply); + return MatcherWatchdog.newInstance( + intervalMillis, + maxExecutionTimeMillis, + parameters.relativeTimeSupplier, + parameters.scheduler::apply + ); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JoinProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JoinProcessor.java index c915cf3d869..672a7167245 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JoinProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JoinProcessor.java @@ -78,9 +78,7 @@ public final class JoinProcessor extends AbstractProcessor { if (list == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot join."); } - String joined = list.stream() - .map(Object::toString) - .collect(Collectors.joining(separator)); + String joined = list.stream().map(Object::toString).collect(Collectors.joining(separator)); document.setFieldValue(targetField, joined); return document; } @@ -92,8 +90,12 @@ public final class JoinProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public JoinProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public JoinProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String separator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", field); @@ -101,4 +103,3 @@ public final class JoinProcessor extends AbstractProcessor { } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JsonProcessor.java 
b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JsonProcessor.java index f72fad141b2..a2b740291d0 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/JsonProcessor.java @@ -82,9 +82,14 @@ public final class JsonProcessor extends AbstractProcessor { public static Object apply(Object fieldValue) { BytesReference bytesRef = fieldValue == null ? new BytesArray("null") : new BytesArray(fieldValue.toString()); - try (InputStream stream = bytesRef.streamInput(); - XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { + try ( + InputStream stream = bytesRef.streamInput(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + stream + ) + ) { XContentParser.Token token = parser.nextToken(); Object value = null; if (token == XContentParser.Token.VALUE_NULL) { @@ -112,8 +117,8 @@ public final class JsonProcessor extends AbstractProcessor { Object value = apply(ctx.get(fieldName)); if (value instanceof Map) { @SuppressWarnings("unchecked") - Map map = (Map) value; - ctx.putAll(map); + Map map = (Map) value; + ctx.putAll(map); } else { throw new IllegalArgumentException("cannot add non-map fields to root of document"); } @@ -122,7 +127,7 @@ public final class JsonProcessor extends AbstractProcessor { @Override public IngestDocument execute(IngestDocument document) throws Exception { if (addToRoot) { - apply(document.getSourceAndMetadata(), field); + apply(document.getSourceAndMetadata(), field); } else { document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class))); } @@ -136,15 +141,23 @@ public final class JsonProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public JsonProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public JsonProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field"); boolean addToRoot = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "add_to_root", false); if (addToRoot && targetField != null) { - throw newConfigurationException(TYPE, processorTag, "target_field", - "Cannot set a target field while also setting `add_to_root` to true"); + throw newConfigurationException( + TYPE, + processorTag, + "target_field", + "Cannot set a target field while also setting `add_to_root` to true" + ); } if (targetField == null) { @@ -155,4 +168,3 @@ public final class JsonProcessor extends AbstractProcessor { } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java index 192bee08601..ff3cca4ce11 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/KeyValueProcessor.java @@ -65,9 +65,21 @@ public final class KeyValueProcessor extends AbstractProcessor { private final boolean ignoreMissing; private 
final Consumer execution; - KeyValueProcessor(String tag, String description, String field, String fieldSplit, String valueSplit, Set includeKeys, - Set excludeKeys, String targetField, boolean ignoreMissing, - String trimKey, String trimValue, boolean stripBrackets, String prefix) { + KeyValueProcessor( + String tag, + String description, + String field, + String fieldSplit, + String valueSplit, + Set includeKeys, + Set excludeKeys, + String targetField, + boolean ignoreMissing, + String trimKey, + String trimValue, + boolean stripBrackets, + String prefix + ) { super(tag, description); this.field = field; this.targetField = targetField; @@ -77,16 +89,33 @@ public final class KeyValueProcessor extends AbstractProcessor { this.excludeKeys = excludeKeys; this.ignoreMissing = ignoreMissing; this.execution = buildExecution( - fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue, - stripBrackets, prefix + fieldSplit, + valueSplit, + field, + includeKeys, + excludeKeys, + targetField, + ignoreMissing, + trimKey, + trimValue, + stripBrackets, + prefix ); } - private static Consumer buildExecution(String fieldSplit, String valueSplit, String field, - Set includeKeys, Set excludeKeys, - String targetField, boolean ignoreMissing, - String trimKey, String trimValue, boolean stripBrackets, - String prefix) { + private static Consumer buildExecution( + String fieldSplit, + String valueSplit, + String field, + Set includeKeys, + Set excludeKeys, + String targetField, + boolean ignoreMissing, + String trimKey, + String trimValue, + boolean stripBrackets, + String prefix + ) { final Predicate keyFilter; if (includeKeys == null) { if (excludeKeys == null) { @@ -213,8 +242,12 @@ public final class KeyValueProcessor extends AbstractProcessor { public static class Factory implements Processor.Factory { @Override - public KeyValueProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public KeyValueProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field"); String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split"); @@ -222,8 +255,7 @@ public final class KeyValueProcessor extends AbstractProcessor { String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key"); String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value"); String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix"); - boolean stripBrackets = - ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false); + boolean stripBrackets = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false); Set includeKeys = null; Set excludeKeys = null; List includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys"); @@ -236,8 +268,19 @@ public final class KeyValueProcessor extends AbstractProcessor { } boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); return new KeyValueProcessor( - processorTag, description, field, fieldSplit, valueSplit, includeKeys, excludeKeys, 
targetField, ignoreMissing, - trimKey, trimValue, stripBrackets, prefix + processorTag, + description, + field, + fieldSplit, + valueSplit, + includeKeys, + excludeKeys, + targetField, + ignoreMissing, + trimKey, + trimValue, + stripBrackets, + prefix ); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/LowercaseProcessor.java index ec01344d882..a2ea102dfe9 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/LowercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/LowercaseProcessor.java @@ -69,8 +69,14 @@ public final class LowercaseProcessor extends AbstractStringProcessor { } @Override - protected LowercaseProcessor newProcessor(String tag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected LowercaseProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new LowercaseProcessor(tag, description, field, ignoreMissing, targetField); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java index 8750e9ac479..93cb60c5b52 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ProcessorsWhitelistExtension.java @@ -44,8 +44,10 @@ import java.util.Map; public class ProcessorsWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = - WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt"); + private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + ProcessorsWhitelistExtension.class, + "processors_whitelist.txt" + ); @Override public Map, List> getContextWhitelists() { diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java index adb17d41d18..5da3b6bea7b 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java @@ -93,8 +93,12 @@ public final class RemoveProcessor extends AbstractProcessor { } @Override - public RemoveProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public RemoveProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { final List fields = new ArrayList<>(); final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field"); if (field instanceof List) { @@ -113,4 +117,3 @@ public final class RemoveProcessor extends AbstractProcessor { } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java index 925cf9a3903..af356eb10d7 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RenameProcessor.java @@ -52,8 +52,13 @@ public 
final class RenameProcessor extends AbstractProcessor { private final TemplateScript.Factory targetField; private final boolean ignoreMissing; - RenameProcessor(String tag, String description, TemplateScript.Factory field, TemplateScript.Factory targetField, - boolean ignoreMissing) { + RenameProcessor( + String tag, + String description, + TemplateScript.Factory field, + TemplateScript.Factory targetField, + boolean ignoreMissing + ) { super(tag, description); this.field = field; this.targetField = targetField; @@ -117,16 +122,24 @@ public final class RenameProcessor extends AbstractProcessor { } @Override - public RenameProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public RenameProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); - TemplateScript.Factory fieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "field", field, scriptService); + TemplateScript.Factory fieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", field, scriptService); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); - TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "target_field", targetField, scriptService); + TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate( + TYPE, + processorTag, + "target_field", + targetField, + scriptService + ); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate , ignoreMissing); + return new RenameProcessor(processorTag, description, fieldTemplate, targetFieldTemplate, ignoreMissing); } } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java index 291f0db93c4..f2568826fa4 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java @@ -64,14 +64,11 @@ import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException */ public final class ScriptProcessor extends AbstractProcessor { - private static final DeprecationLogger deprecationLogger = - DeprecationLogger.getLogger(DynamicMap.class); - private static final Map> PARAMS_FUNCTIONS = org.opensearch.common.collect.Map.of( - "_type", value -> { - deprecationLogger.deprecate("script_processor", - "[types removal] Looking up doc types [_type] in scripts is deprecated."); - return value; - }); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class); + private static final Map> PARAMS_FUNCTIONS = org.opensearch.common.collect.Map.of("_type", value -> { + deprecationLogger.deprecate("script_processor", "[types removal] Looking up doc types [_type] in scripts is deprecated."); + return value; + }); public static final String TYPE = "script"; @@ -87,8 +84,13 @@ public final class ScriptProcessor extends AbstractProcessor { * @param precompiledIngestScript The {@link Script} precompiled * @param scriptService The {@link ScriptService} used to execute the script. 
*/ - ScriptProcessor(String tag, String description, Script script, @Nullable IngestScript precompiledIngestScript, - ScriptService scriptService) { + ScriptProcessor( + String tag, + String description, + Script script, + @Nullable IngestScript precompiledIngestScript, + ScriptService scriptService + ) { super(tag, description); this.script = script; this.precompiledIngestScript = precompiledIngestScript; @@ -135,12 +137,18 @@ public final class ScriptProcessor extends AbstractProcessor { } @Override - public ScriptProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); - InputStream stream = BytesReference.bytes(builder).streamInput(); - XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, stream)) { + public ScriptProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { + try ( + XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); + InputStream stream = BytesReference.bytes(builder).streamInput(); + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream) + ) { Script script = Script.parse(parser); Arrays.asList("id", "source", "inline", "lang", "params", "options").forEach(config::remove); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SetProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SetProcessor.java index 2938dc4f261..949aef5c2d2 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SetProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SetProcessor.java @@ -55,12 +55,18 @@ public final class SetProcessor extends AbstractProcessor { private final ValueSource value; private final boolean ignoreEmptyValue; - SetProcessor(String tag, String description, TemplateScript.Factory field, ValueSource value) { + SetProcessor(String tag, String description, TemplateScript.Factory field, ValueSource value) { this(tag, description, field, value, true, false); } - SetProcessor(String tag, String description, TemplateScript.Factory field, ValueSource value, boolean overrideEnabled, - boolean ignoreEmptyValue) { + SetProcessor( + String tag, + String description, + TemplateScript.Factory field, + ValueSource value, + boolean overrideEnabled, + boolean ignoreEmptyValue + ) { super(tag, description); this.overrideEnabled = overrideEnabled; this.field = field; @@ -106,21 +112,25 @@ public final class SetProcessor extends AbstractProcessor { } @Override - public SetProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public SetProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", true); - TemplateScript.Factory compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, - "field", field, scriptService); + TemplateScript.Factory compiledTemplate = 
ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", field, scriptService); boolean ignoreEmptyValue = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_empty_value", false); return new SetProcessor( - processorTag, - description, - compiledTemplate, - ValueSource.wrap(value, scriptService), - overrideEnabled, - ignoreEmptyValue); + processorTag, + description, + compiledTemplate, + ValueSource.wrap(value, scriptService), + overrideEnabled, + ignoreEmptyValue + ); } } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SortProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SortProcessor.java index c5f97a2253f..1f64cc9056e 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SortProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SortProcessor.java @@ -54,7 +54,8 @@ public final class SortProcessor extends AbstractProcessor { public static final String DEFAULT_ORDER = "asc"; public enum SortOrder { - ASCENDING("asc"), DESCENDING("desc"); + ASCENDING("asc"), + DESCENDING("desc"); private final String direction; @@ -77,8 +78,7 @@ public final class SortProcessor extends AbstractProcessor { } else if (value.equals(DESCENDING.toString())) { return DESCENDING; } - throw new IllegalArgumentException("Sort direction [" + value + "] not recognized." - + " Valid values are: [asc, desc]"); + throw new IllegalArgumentException("Sort direction [" + value + "] not recognized." + " Valid values are: [asc, desc]"); } } @@ -134,18 +134,18 @@ public final class SortProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { @Override - public SortProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public SortProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, FIELD); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", field); try { SortOrder direction = SortOrder.fromString( - ConfigurationUtils.readStringProperty( - TYPE, - processorTag, - config, - ORDER, - DEFAULT_ORDER)); + ConfigurationUtils.readStringProperty(TYPE, processorTag, config, ORDER, DEFAULT_ORDER) + ); return new SortProcessor(processorTag, description, field, direction, targetField); } catch (IllegalArgumentException e) { throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, ORDER, e.getMessage()); @@ -153,4 +153,3 @@ public final class SortProcessor extends AbstractProcessor { } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SplitProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SplitProcessor.java index 174cf36431f..ba1b1b3a8d7 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SplitProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/SplitProcessor.java @@ -57,8 +57,15 @@ public final class SplitProcessor extends AbstractProcessor { private final boolean preserveTrailing; private final String targetField; - SplitProcessor(String tag, String description, String field, String separator, boolean ignoreMissing, boolean preserveTrailing, - String targetField) { + SplitProcessor( + String tag, + String description, + String field, + String separator, + boolean 
ignoreMissing, + boolean preserveTrailing, + String targetField + ) { super(tag, description); this.field = field; this.separator = separator; @@ -79,7 +86,9 @@ public final class SplitProcessor extends AbstractProcessor { return ignoreMissing; } - boolean isPreserveTrailing() { return preserveTrailing; } + boolean isPreserveTrailing() { + return preserveTrailing; + } String getTargetField() { return targetField; @@ -109,8 +118,12 @@ public final class SplitProcessor extends AbstractProcessor { public static class Factory implements Processor.Factory { @Override - public SplitProcessor create(Map registry, String processorTag, - String description, Map config) throws Exception { + public SplitProcessor create( + Map registry, + String processorTag, + String description, + Map config + ) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); boolean preserveTrailing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "preserve_trailing", false); diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/TrimProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/TrimProcessor.java index 98ca1cc880a..38f33942eb7 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/TrimProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/TrimProcessor.java @@ -63,10 +63,15 @@ public final class TrimProcessor extends AbstractStringProcessor { } @Override - protected TrimProcessor newProcessor(String tag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected TrimProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new TrimProcessor(tag, description, field, ignoreMissing, targetField); } } } - diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java index 81e154f16c9..bf80c5b0647 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java @@ -72,8 +72,14 @@ public final class URLDecodeProcessor extends AbstractStringProcessor { } @Override - protected URLDecodeProcessor newProcessor(String tag, String description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected URLDecodeProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new URLDecodeProcessor(tag, description, field, ignoreMissing, targetField); } } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/UppercaseProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/UppercaseProcessor.java index 720d25e6ffd..8cfb2a5f7ce 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/UppercaseProcessor.java @@ -68,8 +68,14 @@ public final class UppercaseProcessor extends AbstractStringProcessor { } @Override - protected UppercaseProcessor newProcessor(String tag, String 
description, Map config, String field, - boolean ignoreMissing, String targetField) { + protected UppercaseProcessor newProcessor( + String tag, + String description, + Map config, + String field, + boolean ignoreMissing, + String targetField + ) { return new UppercaseProcessor(tag, description, field, ignoreMissing, targetField); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorFactoryTestCase.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorFactoryTestCase.java index 14078936e3e..f15701da9d9 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorFactoryTestCase.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorFactoryTestCase.java @@ -109,7 +109,7 @@ public abstract class AbstractStringProcessorFactoryTestCase extends OpenSearchT try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorTestCase.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorTestCase.java index 97f68776f1a..ff7605bd688 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorTestCase.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AbstractStringProcessorTestCase.java @@ -120,16 +120,23 @@ public abstract class AbstractStringProcessorTestCase extends OpenSearchTestC IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(fieldName, randomInt()); Exception e = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); - assertThat(e.getMessage(), equalTo("field [" + fieldName + - "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); List fieldValueList = new ArrayList<>(); int randomValue = randomInt(); fieldValueList.add(randomValue); ingestDocument.setFieldValue(fieldName, fieldValueList); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); - assertThat(exception.getMessage(), equalTo("value [" + randomValue + "] of type [java.lang.Integer] in list field [" + fieldName + - "] cannot be cast to [java.lang.String]")); + assertThat( + exception.getMessage(), + equalTo( + "value [" + + randomValue + + "] of type [java.lang.Integer] in list field [" + + fieldName + + "] cannot be cast to [java.lang.String]" + ) + ); } public void testNonStringValueWithIgnoreMissing() throws Exception { @@ -138,16 +145,23 @@ public abstract class AbstractStringProcessorTestCase extends OpenSearchTestC IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue(fieldName, randomInt()); Exception e = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); - assertThat(e.getMessage(), equalTo("field [" + fieldName + - "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); + assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot 
be cast to [java.lang.String]")); List fieldValueList = new ArrayList<>(); int randomValue = randomInt(); fieldValueList.add(randomValue); ingestDocument.setFieldValue(fieldName, fieldValueList); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); - assertThat(exception.getMessage(), equalTo("value [" + randomValue + "] of type [java.lang.Integer] in list field [" + fieldName + - "] cannot be cast to [java.lang.String]")); + assertThat( + exception.getMessage(), + equalTo( + "value [" + + randomValue + + "] of type [java.lang.Integer] in list field [" + + fieldName + + "] cannot be cast to [java.lang.String]" + ) + ); } public void testTargetField() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorFactoryTests.java index a5fe014641c..8d293230b46 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorFactoryTests.java @@ -77,7 +77,7 @@ public class AppendProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } @@ -88,7 +88,7 @@ public class AppendProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } @@ -100,7 +100,7 @@ public class AppendProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } @@ -111,8 +111,7 @@ public class AppendProcessorFactoryTests extends OpenSearchTestCase { config.put("field", "{{field1}}"); config.put("value", "value1"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java index aa5bd62550b..9a507338df3 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java @@ -236,9 +236,13 @@ public class AppendProcessorTests extends OpenSearchTestCase { } private static Processor createAppendProcessor(String fieldName, Object fieldValue, boolean allowDuplicates) { - return new AppendProcessor(randomAlphaOfLength(10), - null, 
new TestTemplateService.MockTemplateScript.Factory(fieldName), - ValueSource.wrap(fieldValue, TestTemplateService.instance()), allowDuplicates); + return new AppendProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory(fieldName), + ValueSource.wrap(fieldValue, TestTemplateService.instance()), + allowDuplicates + ); } private enum Scalar { @@ -247,27 +251,32 @@ public class AppendProcessorTests extends OpenSearchTestCase { Object randomValue() { return randomInt(); } - }, DOUBLE { + }, + DOUBLE { @Override Object randomValue() { return randomDouble(); } - }, FLOAT { + }, + FLOAT { @Override Object randomValue() { return randomFloat(); } - }, BOOLEAN { + }, + BOOLEAN { @Override Object randomValue() { return randomBoolean(); } - }, STRING { + }, + STRING { @Override Object randomValue() { return randomAlphaOfLengthBetween(1, 10); } - }, MAP { + }, + MAP { @Override Object randomValue() { int numItems = randomIntBetween(1, 10); @@ -277,7 +286,8 @@ public class AppendProcessorTests extends OpenSearchTestCase { } return map; } - }, NULL { + }, + NULL { @Override Object randomValue() { return null; diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java index a30c75c61db..bbd9ff4c8b9 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java @@ -53,7 +53,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { @Override protected String modifyInput(String input) { - //largest value that allows all results < Long.MAX_VALUE bytes + // largest value that allows all results < Long.MAX_VALUE bytes long randomNumber = randomLongBetween(1, Long.MAX_VALUE / ByteSizeUnit.PB.toBytes(1)); ByteSizeUnit randomUnit = randomFrom(ByteSizeUnit.values()); modifiedInput = randomNumber + randomUnit.getSuffix(); @@ -75,10 +75,14 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "8912pb"); Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); OpenSearchException exception = expectThrows(OpenSearchException.class, () -> processor.execute(ingestDocument)); - assertThat(exception.getMessage(), - CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes")); - assertThat(exception.getCause().getMessage(), - CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported")); + assertThat( + exception.getMessage(), + CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes") + ); + assertThat( + exception.getCause().getMessage(), + CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported") + ); } public void testNotBytes() { @@ -86,8 +90,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "junk"); Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); OpenSearchException exception = expectThrows(OpenSearchException.class, () -> processor.execute(ingestDocument)); - assertThat(exception.getMessage(), - CoreMatchers.equalTo("failed to parse [junk]")); + 
assertThat(exception.getMessage(), CoreMatchers.equalTo("failed to parse [junk]")); } public void testMissingUnits() { @@ -95,8 +98,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "1"); Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); OpenSearchException exception = expectThrows(OpenSearchException.class, () -> processor.execute(ingestDocument)); - assertThat(exception.getMessage(), - CoreMatchers.containsString("unit is missing or unrecognized")); + assertThat(exception.getMessage(), CoreMatchers.containsString("unit is missing or unrecognized")); } public void testFractional() throws Exception { @@ -105,7 +107,8 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L)); - assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + - "[Ingest Field]"); + assertWarnings( + "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + "[Ingest Field]" + ); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ConvertProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ConvertProcessorTests.java index 0a1db4380dd..6eed29e330f 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ConvertProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ConvertProcessorTests.java @@ -115,7 +115,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to integer")); } } @@ -185,7 +185,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to long")); } } @@ -228,7 +228,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to double")); } } @@ -271,7 +271,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to float")); } } @@ -317,7 +317,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { if (randomBoolean()) { fieldValue = "string-" + randomAlphaOfLengthBetween(1, 10); } else { - //verify that only proper boolean values are supported and we are strict about it + // verify that only proper boolean values are supported and we are strict about it fieldValue = randomFrom("on", "off", "yes", "no", "0", "1"); } 
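Not part of the patch — a sketch of the strict boolean conversion this test relies on, paraphrased from the processor's documented behaviour rather than copied from it: only the literal strings "true" and "false" (case-insensitive) convert, so values such as "yes", "no", "0" or "1" trigger the error message asserted just below.

    // Hypothetical stand-in for the strict conversion the test exercises; not the actual implementation.
    final class StrictBooleanSketch {
        static boolean toBoolean(String value) {
            if (value.equalsIgnoreCase("true")) {
                return true;
            } else if (value.equalsIgnoreCase("false")) {
                return false;
            }
            throw new IllegalArgumentException("[" + value + "] is not a boolean value, cannot convert to boolean");
        }
    }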
ingestDocument.setFieldValue(fieldName, fieldValue); @@ -326,7 +326,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(Exception e) { + } catch (Exception e) { assertThat(e.getMessage(), equalTo("[" + fieldValue + "] is not a boolean value, cannot convert to boolean")); } } @@ -335,7 +335,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Object fieldValue; String expectedFieldValue; - switch(randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 2)) { case 0: float randomFloat = randomFloat(); fieldValue = randomFloat; @@ -369,7 +369,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { for (int j = 0; j < numItems; j++) { Object randomValue; String randomValueString; - switch(randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 2)) { case 0: float randomFloat = randomFloat(); randomValue = randomFloat; @@ -453,7 +453,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { public void testAutoConvertNotString() throws Exception { Object randomValue; - switch(randomIntBetween(0, 2)) { + switch (randomIntBetween(0, 2)) { case 0: float randomFloat = randomFloat(); randomValue = randomFloat; @@ -488,8 +488,10 @@ public class ConvertProcessorTests extends OpenSearchTestCase { public void testAutoConvertMatchBoolean() throws Exception { boolean randomBoolean = randomBoolean(); String booleanString = Boolean.toString(randomBoolean); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("field", booleanString)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap("field", booleanString) + ); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); @@ -519,7 +521,7 @@ public class ConvertProcessorTests extends OpenSearchTestCase { public void testAutoConvertDoubleNotMatched() throws Exception { double randomDouble = randomDouble(); String randomString = Double.toString(randomDouble); - float randomFloat = Float.parseFloat(randomString); + float randomFloat = Float.parseFloat(randomString); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomString)); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorFactoryTests.java index 14815f51fe2..e762951600f 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorFactoryTests.java @@ -57,7 +57,7 @@ public class CsvProcessorFactoryTests extends OpenSearchTestCase { CsvProcessor csv = factory.create(null, "csv", null, properties); assertThat(csv, notNullValue()); assertThat(csv.field, equalTo("field")); - assertThat(csv.headers, equalTo(new String[]{"target"})); + assertThat(csv.headers, equalTo(new String[] { "target" })); assertThat(csv.quote, 
equalTo('|')); assertThat(csv.separator, equalTo('/')); assertThat(csv.emptyValue, equalTo("empty")); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorTests.java index 455f6527d99..1359750dc16 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CsvProcessorTests.java @@ -47,12 +47,11 @@ import java.util.stream.Collectors; public class CsvProcessorTests extends OpenSearchTestCase { - private static final Character[] SEPARATORS = new Character[]{',', ';', '|', '.', '\t'}; - private static final String[] QUOTES = new String[]{"'", "\"", ""}; + private static final Character[] SEPARATORS = new Character[] { ',', ';', '|', '.', '\t' }; + private static final String[] QUOTES = new String[] { "'", "\"", "" }; private final String quote; private final char separator; - public CsvProcessorTests(@Name("quote") String quote, @Name("separator") char separator) { this.quote = quote; this.separator = separator; @@ -63,7 +62,7 @@ public class CsvProcessorTests extends OpenSearchTestCase { LinkedList list = new LinkedList<>(); for (Character separator : SEPARATORS) { for (String quote : QUOTES) { - list.add(new Object[]{quote, separator}); + list.add(new Object[] { quote, separator }); } } return list; @@ -173,7 +172,7 @@ public class CsvProcessorTests extends OpenSearchTestCase { } public void testSingleField() { - String[] headers = new String[]{randomAlphaOfLengthBetween(5, 10)}; + String[] headers = new String[] { randomAlphaOfLengthBetween(5, 10) }; String value = randomAlphaOfLengthBetween(5, 10); String csv = quote + value + quote; @@ -186,8 +185,10 @@ public class CsvProcessorTests extends OpenSearchTestCase { int numItems = randomIntBetween(2, 10); Map items = new LinkedHashMap<>(); for (int i = 0; i < numItems; i++) { - items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10) + quote + quote + randomAlphaOfLengthBetween(5 - , 10) + quote + quote); + items.put( + randomAlphaOfLengthBetween(5, 10), + randomAlphaOfLengthBetween(5, 10) + quote + quote + randomAlphaOfLengthBetween(5, 10) + quote + quote + ); } String[] headers = items.keySet().toArray(new String[numItems]); String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + "")); @@ -202,16 +203,17 @@ public class CsvProcessorTests extends OpenSearchTestCase { int numItems = randomIntBetween(2, 10); Map items = new LinkedHashMap<>(); for (int i = 0; i < numItems; i++) { - items.put(randomAlphaOfLengthBetween(5, 10), - separator + randomAlphaOfLengthBetween(5, 10) + separator + "\n\r" + randomAlphaOfLengthBetween(5, 10)); + items.put( + randomAlphaOfLengthBetween(5, 10), + separator + randomAlphaOfLengthBetween(5, 10) + separator + "\n\r" + randomAlphaOfLengthBetween(5, 10) + ); } String[] headers = items.keySet().toArray(new String[numItems]); String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + "")); IngestDocument ingestDocument = processDocument(headers, csv); - items.forEach((key, value) -> assertEquals(value.replace(quote + quote, quote), ingestDocument.getFieldValue(key, - String.class))); + items.forEach((key, value) -> assertEquals(value.replace(quote + quote, quote), ingestDocument.getFieldValue(key, String.class))); } public void testEmptyFields() { @@ -221,32 +223,42 @@ 
public class CsvProcessorTests extends OpenSearchTestCase { items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } String[] headers = items.keySet().toArray(new String[numItems]); - String csv = - items.values().stream().map(v -> quote + v + quote).limit(numItems - 1).skip(3).collect(Collectors.joining(separator + "")); + String csv = items.values() + .stream() + .map(v -> quote + v + quote) + .limit(numItems - 1) + .skip(3) + .collect(Collectors.joining(separator + "")); - IngestDocument ingestDocument = processDocument(headers, - "" + separator + "" + separator + "" + separator + csv + separator + separator + - "abc"); + IngestDocument ingestDocument = processDocument( + headers, + "" + separator + "" + separator + "" + separator + csv + separator + separator + "abc" + ); items.keySet().stream().limit(3).forEach(key -> assertFalse(ingestDocument.hasField(key))); - items.entrySet().stream().limit(numItems - 1).skip(3).forEach(e -> assertEquals(e.getValue(), - ingestDocument.getFieldValue(e.getKey(), String.class))); + items.entrySet() + .stream() + .limit(numItems - 1) + .skip(3) + .forEach(e -> assertEquals(e.getValue(), ingestDocument.getFieldValue(e.getKey(), String.class))); items.keySet().stream().skip(numItems - 1).forEach(key -> assertFalse(ingestDocument.hasField(key))); } public void testWrongStrings() throws Exception { assumeTrue("single run only", quote.isEmpty()); - expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\"abc")); - expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "\"abc\"asd")); - expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "\"abcasd")); - expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\nabc")); - expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\rabc")); + expectThrows(IllegalArgumentException.class, () -> processDocument(new String[] { "a" }, "abc\"abc")); + expectThrows(IllegalArgumentException.class, () -> processDocument(new String[] { "a" }, "\"abc\"asd")); + expectThrows(IllegalArgumentException.class, () -> processDocument(new String[] { "a" }, "\"abcasd")); + expectThrows(IllegalArgumentException.class, () -> processDocument(new String[] { "a" }, "abc\nabc")); + expectThrows(IllegalArgumentException.class, () -> processDocument(new String[] { "a" }, "abc\rabc")); } public void testQuotedWhitespaces() { assumeFalse("quote needed", quote.isEmpty()); - IngestDocument document = processDocument(new String[]{"a", "b", "c", "d"}, - " abc " + separator + " def" + separator + "ghi " + separator + " " + quote + " ooo " + quote); + IngestDocument document = processDocument( + new String[] { "a", "b", "c", "d" }, + " abc " + separator + " def" + separator + "ghi " + separator + " " + quote + " ooo " + quote + ); assertEquals("abc", document.getFieldValue("a", String.class)); assertEquals("def", document.getFieldValue("b", String.class)); assertEquals("ghi", document.getFieldValue("c", String.class)); @@ -255,9 +267,27 @@ public class CsvProcessorTests extends OpenSearchTestCase { public void testUntrimmed() { assumeFalse("quote needed", quote.isEmpty()); - IngestDocument document = processDocument(new String[]{"a", "b", "c", "d", "e", "f"}, - " abc " + separator + " def" + separator + "ghi " + separator + " " - + quote + "ooo" + quote + " " + separator + " " + quote + "jjj" + quote + " ", false); + IngestDocument document = 
processDocument( + new String[] { "a", "b", "c", "d", "e", "f" }, + " abc " + + separator + + " def" + + separator + + "ghi " + + separator + + " " + + quote + + "ooo" + + quote + + " " + + separator + + " " + + quote + + "jjj" + + quote + + " ", + false + ); assertEquals(" abc ", document.getFieldValue("a", String.class)); assertEquals(" def", document.getFieldValue("b", String.class)); assertEquals("ghi ", document.getFieldValue("c", String.class)); @@ -273,10 +303,29 @@ public class CsvProcessorTests extends OpenSearchTestCase { if (ingestDocument.hasField(fieldName)) { ingestDocument.removeField(fieldName); } - CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[]{"a"}, false, ',', '"', true, null); + CsvProcessor processor = new CsvProcessor( + randomAlphaOfLength(5), + null, + fieldName, + new String[] { "a" }, + false, + ',', + '"', + true, + null + ); processor.execute(ingestDocument); - CsvProcessor processor2 = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[]{"a"}, false, - ',', '"', false, null); + CsvProcessor processor2 = new CsvProcessor( + randomAlphaOfLength(5), + null, + fieldName, + new String[] { "a" }, + false, + ',', + '"', + false, + null + ); expectThrows(IllegalArgumentException.class, () -> processor2.execute(ingestDocument)); } @@ -308,8 +357,17 @@ public class CsvProcessorTests extends OpenSearchTestCase { ingestDocument.setFieldValue(fieldName, csv); char quoteChar = quote.isEmpty() ? '"' : quote.charAt(0); - CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, headers, trim, separator, quoteChar, false, - emptyValue); + CsvProcessor processor = new CsvProcessor( + randomAlphaOfLength(5), + null, + fieldName, + headers, + trim, + separator, + quoteChar, + false, + emptyValue + ); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java index cf0ecb4eccb..951b93deb6e 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java @@ -50,25 +50,37 @@ import static org.hamcrest.core.IsEqual.equalTo; public class DateFormatTests extends OpenSearchTestCase { public void testParseJava() { - Function javaFunction = DateFormat.Java.getFunction("MMM dd HH:mm:ss Z", - ZoneOffset.ofHours(-8), Locale.ENGLISH); - assertThat(javaFunction.apply("Nov 24 01:29:01 -0800").toInstant() - .atZone(ZoneId.of("GMT-8")) - .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), - equalTo("11 24 01:29:01")); + Function javaFunction = DateFormat.Java.getFunction( + "MMM dd HH:mm:ss Z", + ZoneOffset.ofHours(-8), + Locale.ENGLISH + ); + assertThat( + javaFunction.apply("Nov 24 01:29:01 -0800") + .toInstant() + .atZone(ZoneId.of("GMT-8")) + .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)), + equalTo("11 24 01:29:01") + ); } public void testParseYearOfEraJavaWithTimeZone() { - Function javaFunction = DateFormat.Java.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", - ZoneOffset.UTC, Locale.ROOT); + Function javaFunction = DateFormat.Java.getFunction( + "yyyy-MM-dd'T'HH:mm:ss.SSSZZ", + ZoneOffset.UTC, + Locale.ROOT + ); ZonedDateTime datetime = javaFunction.apply("2018-02-05T13:44:56.657+0100"); String expectedDateTime = 
DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").withZone(ZoneOffset.UTC).format(datetime); assertThat(expectedDateTime, is("2018-02-05T12:44:56.657Z")); } public void testParseYearJavaWithTimeZone() { - Function javaFunction = DateFormat.Java.getFunction("uuuu-MM-dd'T'HH:mm:ss.SSSZZ", - ZoneOffset.UTC, Locale.ROOT); + Function javaFunction = DateFormat.Java.getFunction( + "uuuu-MM-dd'T'HH:mm:ss.SSSZZ", + ZoneOffset.UTC, + Locale.ROOT + ); ZonedDateTime datetime = javaFunction.apply("2018-02-05T13:44:56.657+0100"); String expectedDateTime = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").withZone(ZoneOffset.UTC).format(datetime); assertThat(expectedDateTime, is("2018-02-05T12:44:56.657Z")); @@ -84,55 +96,68 @@ public class DateFormatTests extends OpenSearchTestCase { } public void testParseWeekBased() { - assumeFalse("won't work in jdk8 " + - "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8"))); + assumeFalse( + "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", + JavaVersion.current().equals(JavaVersion.parse("8")) + ); String format = randomFrom("YYYY-ww"); ZoneId timezone = DateUtils.of("Europe/Amsterdam"); Function javaFunction = DateFormat.Java.getFunction(format, timezone, Locale.ROOT); ZonedDateTime dateTime = javaFunction.apply("2020-33"); - assertThat(dateTime, equalTo(ZonedDateTime.of(2020,8,10,0,0,0,0,timezone))); + assertThat(dateTime, equalTo(ZonedDateTime.of(2020, 8, 10, 0, 0, 0, 0, timezone))); } public void testParseWeekBasedWithLocale() { - assumeFalse("won't work in jdk8 " + - "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8"))); + assumeFalse( + "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", + JavaVersion.current().equals(JavaVersion.parse("8")) + ); String format = randomFrom("YYYY-ww"); ZoneId timezone = DateUtils.of("Europe/Amsterdam"); Function javaFunction = DateFormat.Java.getFunction(format, timezone, Locale.US); ZonedDateTime dateTime = javaFunction.apply("2020-33"); - //33rd week of 2020 starts on 9th August 2020 as per US locale - assertThat(dateTime, equalTo(ZonedDateTime.of(2020,8,9,0,0,0,0,timezone))); + // 33rd week of 2020 starts on 9th August 2020 as per US locale + assertThat(dateTime, equalTo(ZonedDateTime.of(2020, 8, 9, 0, 0, 0, 0, timezone))); } public void testParseUnixMs() { - assertThat(DateFormat.UnixMs.getFunction(null, ZoneOffset.UTC, null).apply("1000500").toInstant().toEpochMilli(), - equalTo(1000500L)); + assertThat( + DateFormat.UnixMs.getFunction(null, ZoneOffset.UTC, null).apply("1000500").toInstant().toEpochMilli(), + equalTo(1000500L) + ); } public void testParseUnix() { - assertThat(DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null).apply("1000.5").toInstant().toEpochMilli(), - equalTo(1000500L)); + assertThat(DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null).apply("1000.5").toInstant().toEpochMilli(), equalTo(1000500L)); } public void testParseUnixWithMsPrecision() { - assertThat(DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null).apply("1495718015").toInstant().toEpochMilli(), - equalTo(1495718015000L)); + assertThat( + DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null).apply("1495718015").toInstant().toEpochMilli(), + 
equalTo(1495718015000L) + ); } public void testParseISO8601() { - assertThat(DateFormat.Iso8601.getFunction(null, ZoneOffset.UTC, null).apply("2001-01-01T00:00:00-0800") - .toInstant().toEpochMilli(), equalTo(978336000000L)); - assertThat(DateFormat.Iso8601.getFunction(null, ZoneOffset.UTC, null).apply("2001-01-01T00:00:00-0800").toString(), - equalTo("2001-01-01T08:00Z")); + assertThat( + DateFormat.Iso8601.getFunction(null, ZoneOffset.UTC, null).apply("2001-01-01T00:00:00-0800").toInstant().toEpochMilli(), + equalTo(978336000000L) + ); + assertThat( + DateFormat.Iso8601.getFunction(null, ZoneOffset.UTC, null).apply("2001-01-01T00:00:00-0800").toString(), + equalTo("2001-01-01T08:00Z") + ); } public void testParseWhenZoneNotPresentInText() { - assertThat(DateFormat.Iso8601.getFunction(null, ZoneOffset.of("+0100"), null).apply("2001-01-01T00:00:00") - .toInstant().toEpochMilli(), equalTo(978303600000L)); - assertThat(DateFormat.Iso8601.getFunction(null, ZoneOffset.of("+0100"), null).apply("2001-01-01T00:00:00").toString(), - equalTo("2001-01-01T00:00+01:00")); + assertThat( + DateFormat.Iso8601.getFunction(null, ZoneOffset.of("+0100"), null).apply("2001-01-01T00:00:00").toInstant().toEpochMilli(), + equalTo(978303600000L) + ); + assertThat( + DateFormat.Iso8601.getFunction(null, ZoneOffset.of("+0100"), null).apply("2001-01-01T00:00:00").toString(), + equalTo("2001-01-01T00:00+01:00") + ); } public void testParseISO8601Failure() { @@ -140,16 +165,18 @@ public class DateFormatTests extends OpenSearchTestCase { try { function.apply("2001-01-0:00-0800"); fail("parse should have failed"); - } catch(IllegalArgumentException e) { - //all good + } catch (IllegalArgumentException e) { + // all good } } public void testTAI64NParse() { String input = "4000000050d506482dbdf024"; String expected = "2012-12-22T03:00:46.767+02:00"; - assertThat(DateFormat.Tai64n.getFunction(null, ZoneOffset.ofHours(2), null) - .apply((randomBoolean() ? "@" : "") + input).toString(), equalTo(expected)); + assertThat( + DateFormat.Tai64n.getFunction(null, ZoneOffset.ofHours(2), null).apply((randomBoolean() ? 
"@" : "") + input).toString(), + equalTo(expected) + ); } public void testFromString() { diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java index d6e4bf3810b..820ef3a8ee9 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateIndexNameProcessorTests.java @@ -50,45 +50,96 @@ public class DateIndexNameProcessorTests extends OpenSearchTestCase { public void testJavaPattern() throws Exception { Function function = DateFormat.Java.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSXX", ZoneOffset.UTC, Locale.ROOT); - DateIndexNameProcessor processor = createProcessor("_field", Collections.singletonList(function), - ZoneOffset.UTC, "events-", "y", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z")); + DateIndexNameProcessor processor = createProcessor( + "_field", + Collections.singletonList(function), + ZoneOffset.UTC, + "events-", + "y", + "yyyyMMdd" + ); + IngestDocument document = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z") + ); processor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } - public void testTAI64N()throws Exception { + public void testTAI64N() throws Exception { Function function = DateFormat.Tai64n.getFunction(null, ZoneOffset.UTC, null); - DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), - ZoneOffset.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024")); + DateIndexNameProcessor dateProcessor = createProcessor( + "_field", + Collections.singletonList(function), + ZoneOffset.UTC, + "events-", + "m", + "yyyyMMdd" + ); + IngestDocument document = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("_field", (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024") + ); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } - public void testUnixMs()throws Exception { + public void testUnixMs() throws Exception { Function function = DateFormat.UnixMs.getFunction(null, ZoneOffset.UTC, null); - DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), - ZoneOffset.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", "1000500")); + DateIndexNameProcessor dateProcessor = createProcessor( + "_field", + Collections.singletonList(function), + ZoneOffset.UTC, + "events-", + "m", + "yyyyMMdd" + ); + IngestDocument document = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("_field", "1000500") + ); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); - document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", 1000500L)); + document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", 1000500L)); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } - public void testUnix()throws Exception { + public void testUnix() throws Exception { Function function = DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null); - DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), - ZoneOffset.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", "1000.5")); + DateIndexNameProcessor dateProcessor = createProcessor( + "_field", + Collections.singletonList(function), + ZoneOffset.UTC, + "events-", + "m", + "yyyyMMdd" + ); + IngestDocument document = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("_field", "1000.5") + ); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } @@ -100,23 +151,48 @@ public class DateIndexNameProcessorTests extends OpenSearchTestCase { String date = Integer.toString(randomInt()); Function dateTimeFunction = DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null); - DateIndexNameProcessor dateProcessor = createProcessor("_field", - Collections.singletonList(dateTimeFunction), ZoneOffset.UTC, indexNamePrefix, - dateRounding, indexNameFormat); + DateIndexNameProcessor dateProcessor = createProcessor( + "_field", + Collections.singletonList(dateTimeFunction), + ZoneOffset.UTC, + indexNamePrefix, + dateRounding, + indexNameFormat + ); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("_field", date)); + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", date)); dateProcessor.execute(document); - assertThat(document.getSourceAndMetadata().get("_index"), - equalTo("<"+indexNamePrefix+"{" + DateFormatter.forPattern(indexNameFormat) - .format(dateTimeFunction.apply(date))+"||/"+dateRounding+"{"+indexNameFormat+"|UTC}}>")); + assertThat( + document.getSourceAndMetadata().get("_index"), + equalTo( + "<" + + indexNamePrefix + + "{" + + 
DateFormatter.forPattern(indexNameFormat).format(dateTimeFunction.apply(date)) + + "||/" + + dateRounding + + "{" + + indexNameFormat + + "|UTC}}>" + ) + ); } - private DateIndexNameProcessor createProcessor(String field, List> dateFormats, - ZoneId timezone, String indexNamePrefix, String dateRounding, - String indexNameFormat) { - return new DateIndexNameProcessor(randomAlphaOfLength(10), null, field, dateFormats, timezone, + private DateIndexNameProcessor createProcessor( + String field, + List> dateFormats, + ZoneId timezone, + String indexNamePrefix, + String dateRounding, + String indexNameFormat + ) { + return new DateIndexNameProcessor( + randomAlphaOfLength(10), + null, + field, + dateFormats, + timezone, new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix), new TestTemplateService.MockTemplateScript.Factory(dateRounding), new TestTemplateService.MockTemplateScript.Factory(indexNameFormat) diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorFactoryTests.java index c004409ff47..a19fc27c277 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorFactoryTests.java @@ -80,7 +80,7 @@ public class DateProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("processor creation should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), containsString("[field] required property is missing")); } } @@ -95,7 +95,7 @@ public class DateProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("processor creation should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), containsString("[formats] required property is missing")); } } @@ -143,7 +143,7 @@ public class DateProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("processor creation should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorTests.java index 2699e49d598..2f4df76953d 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateProcessorTests.java @@ -64,9 +64,15 @@ public class DateProcessorTests extends OpenSearchTestCase { } public void testJavaPattern() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), - "date_as_string", Collections.singletonList("yyyy dd MM HH:mm:ss"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + Collections.singletonList("yyyy dd MM HH:mm:ss"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", 
"2010 12 06 11:05:15"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -79,9 +85,15 @@ public class DateProcessorTests extends OpenSearchTestCase { matchFormats.add("yyyy dd MM"); matchFormats.add("dd/MM/yyyy"); matchFormats.add("dd-MM-yyyy"); - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), - "date_as_string", matchFormats, "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + matchFormats, + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06"); @@ -107,15 +119,21 @@ public class DateProcessorTests extends OpenSearchTestCase { try { dateProcessor.execute(ingestDocument); fail("processor should have failed due to not supported date format"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("unable to parse date [2010]")); } } public void testJavaPatternNoTimezone() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, null, null, - "date_as_string", Arrays.asList("yyyy dd MM HH:mm:ss XXX"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + null, + null, + "date_as_string", + Arrays.asList("yyyy dd MM HH:mm:ss XXX"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06 00:00:00 -02:00"); @@ -126,14 +144,20 @@ public class DateProcessorTests extends OpenSearchTestCase { public void testInvalidJavaPattern() { try { - DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneOffset.UTC), templatize(randomLocale(random())), - "date_as_string", Collections.singletonList("invalid pattern"), "date_as_date"); + DateProcessor processor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneOffset.UTC), + templatize(randomLocale(random())), + "date_as_string", + Collections.singletonList("invalid pattern"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010"); processor.execute(RandomDocumentPicks.randomIngestDocument(random(), document)); fail("date processor execution should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("unable to parse date [2010]")); assertThat(e.getCause().getMessage(), equalTo("Invalid format: [invalid pattern]: Unknown pattern letter: i")); } @@ -141,9 +165,15 @@ public class DateProcessorTests extends OpenSearchTestCase { public void testJavaPatternLocale() { assumeFalse("Can't run in a FIPS JVM, Joda parse date error", inFipsJvm()); - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN), - "date_as_string", Collections.singletonList("yyyy dd MMMM"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ITALIAN), + "date_as_string", + Collections.singletonList("yyyy dd MMMM"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); @@ -153,9 +183,15 @@ public class DateProcessorTests extends OpenSearchTestCase { public void testJavaPatternEnglishLocale() { // Since testJavaPatternLocale is muted in FIPS mode, test that we can correctly parse dates in english - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), - "date_as_string", Collections.singletonList("yyyy dd MMMM"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + Collections.singletonList("yyyy dd MMMM"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 June"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -165,21 +201,35 @@ public class DateProcessorTests extends OpenSearchTestCase { public void testJavaPatternDefaultYear() { String format = randomFrom("dd/MM", "8dd/MM"); - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), - "date_as_string", Collections.singletonList(format), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + Collections.singletonList(format), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue("date_as_date", String.class), - equalTo(ZonedDateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + assertThat( + ingestDocument.getFieldValue("date_as_date", String.class), + equalTo(ZonedDateTime.now().getYear() + "-06-12T00:00:00.000+02:00") + ); } public void testTAI64N() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), null, templatize(ZoneOffset.ofHours(2)), + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneOffset.ofHours(2)), templatize(randomLocale(random())), - "date_as_string", Collections.singletonList("TAI64N"), "date_as_date"); + "date_as_string", + Collections.singletonList("TAI64N"), + "date_as_date" + ); Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; document.put("date_as_string", dateAsString); @@ -189,8 +239,15 @@ public class DateProcessorTests extends OpenSearchTestCase { } public void testUnixMs() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), null, templatize(ZoneOffset.UTC), - templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneOffset.UTC), + templatize(randomLocale(random())), + "date_as_string", + Collections.singletonList("UNIX_MS"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -205,9 +262,15 @@ public class DateProcessorTests extends OpenSearchTestCase { } public void testUnix() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), null, templatize(ZoneOffset.UTC), + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneOffset.UTC), templatize(randomLocale(random())), - "date_as_string", Collections.singletonList("UNIX"), "date_as_date"); + "date_as_string", + Collections.singletonList("UNIX"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -216,33 +279,57 @@ public class DateProcessorTests extends OpenSearchTestCase { } public void testInvalidTimezone() { - DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), - null, new TestTemplateService.MockTemplateScript.Factory("invalid_timezone"), templatize(randomLocale(random())), - "date_as_string", Collections.singletonList("yyyy"), "date_as_date"); + DateProcessor processor = new DateProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory("invalid_timezone"), + templatize(randomLocale(random())), + "date_as_string", + Collections.singletonList("yyyy"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> processor.execute(RandomDocumentPicks.randomIngestDocument(random(), document))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> processor.execute(RandomDocumentPicks.randomIngestDocument(random(), document)) + ); assertThat(e.getMessage(), equalTo("unable to parse date [2010]")); assertThat(e.getCause().getMessage(), equalTo("Unknown time-zone ID: invalid_timezone")); } public void testInvalidLocale() { - DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), - null, templatize(ZoneOffset.UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"), - "date_as_string", Collections.singletonList("yyyy"), "date_as_date"); + DateProcessor processor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneOffset.UTC), + new TestTemplateService.MockTemplateScript.Factory("invalid_locale"), + "date_as_string", + Collections.singletonList("yyyy"), + "date_as_date" + ); Map document = new HashMap<>(); document.put("date_as_string", "2010"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> processor.execute(RandomDocumentPicks.randomIngestDocument(random(), document))); + IllegalArgumentException e = 
expectThrows( + IllegalArgumentException.class, + () -> processor.execute(RandomDocumentPicks.randomIngestDocument(random(), document)) + ); assertThat(e.getMessage(), equalTo("unable to parse date [2010]")); assertThat(e.getCause().getMessage(), equalTo("Unknown language: invalid")); } public void testOutputFormat() { long nanosAfterEpoch = randomLongBetween(1, 999999); - DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), null, null, null, - "date_as_string", Collections.singletonList("iso8601"), "date_as_date", "HH:mm:ss.SSSSSSSSS"); + DateProcessor processor = new DateProcessor( + randomAlphaOfLength(10), + null, + null, + null, + "date_as_string", + Collections.singletonList("iso8601"), + "date_as_date", + "HH:mm:ss.SSSSSSSSS" + ); Map document = new HashMap<>(); document.put("date_as_string", Instant.EPOCH.plusNanos(nanosAfterEpoch).toString()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java index 08e042a4ab9..6f44b81e7b4 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DissectProcessorTests.java @@ -53,8 +53,15 @@ import static org.hamcrest.Matchers.equalTo; public class DissectProcessorTests extends OpenSearchTestCase { public void testMatch() { - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("message", "foo,bar,baz")); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("message", "foo,bar,baz") + ); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); dissectProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo")); @@ -63,11 +70,15 @@ public class DissectProcessorTests extends OpenSearchTestCase { } public void testMatchOverwrite() { - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, - MapBuilder.newMapBuilder() - .put("message", "foo,bar,baz") - .put("a", "willgetstompped") - .map()); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + MapBuilder.newMapBuilder().put("message", "foo,bar,baz").put("a", "willgetstompped").map() + ); assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("willgetstompped")); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); dissectProcessor.execute(ingestDocument); @@ -77,10 +88,23 @@ public class DissectProcessorTests extends OpenSearchTestCase { } public void testAdvancedMatch() { - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("message", "foo bar,,,,,,,baz nope:notagain 😊 🐇 🙃")); - DissectProcessor dissectProcessor = - new DissectProcessor("", null, "message", "%{a->} %{*b->},%{&b} %{}:%{?skipme} %{+smile/2} 🐇 %{+smile/1}", "::::", true); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("message", "foo bar,,,,,,,baz nope:notagain 😊 🐇 🙃") + ); + DissectProcessor 
dissectProcessor = new DissectProcessor( + "", + null, + "message", + "%{a->} %{*b->},%{&b} %{}:%{?skipme} %{+smile/2} 🐇 %{+smile/1}", + "::::", + true + ); dissectProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("a", String.class), equalTo("foo")); assertThat(ingestDocument.getFieldValue("bar", String.class), equalTo("baz")); @@ -90,8 +114,15 @@ public class DissectProcessorTests extends OpenSearchTestCase { } public void testMiss() { - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, - Collections.singletonMap("message", "foo:bar,baz")); + IngestDocument ingestDocument = new IngestDocument( + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("message", "foo:bar,baz") + ); DissectProcessor dissectProcessor = new DissectProcessor("", null, "message", "%{a},%{b},%{c}", "", true); DissectException e = expectThrows(DissectException.class, () -> dissectProcessor.execute(ingestDocument)); assertThat(e.getMessage(), CoreMatchers.containsString("Unable to find match for dissect pattern")); @@ -109,8 +140,10 @@ public class DissectProcessorTests extends OpenSearchTestCase { public void testNullValueWithIgnoreMissing() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = new DissectProcessor("", null, fieldName, "%{a},%{b},%{c}", "", true); - IngestDocument originalIngestDocument = RandomDocumentPicks - .randomIngestDocument(random(), Collections.singletonMap(fieldName, null)); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap(fieldName, null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -119,8 +152,10 @@ public class DissectProcessorTests extends OpenSearchTestCase { public void testNullValueWithOutIgnoreMissing() { String fieldName = RandomDocumentPicks.randomFieldName(random()); Processor processor = new DissectProcessor("", null, fieldName, "%{a},%{b},%{c}", "", false); - IngestDocument originalIngestDocument = RandomDocumentPicks - .randomIngestDocument(random(), Collections.singletonMap(fieldName, null)); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap(fieldName, null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorFactoryTests.java index 465107345e0..90a472b6ba4 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorFactoryTests.java @@ -63,7 +63,7 @@ public class DotExpanderProcessorFactoryTests extends OpenSearchTestCase { public void testValidFields() throws Exception { DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory(); - String[] fields = new String[] {"a.b", "a.b.c", "a.b.c.d", "ab.cd"}; + String[] fields = new String[] { "a.b", "a.b.c", "a.b.c.d", "ab.cd" }; for (String field : fields) { Map config = new HashMap<>(); config.put("field", field); @@ -85,7 +85,7 
@@ public class DotExpanderProcessorFactoryTests extends OpenSearchTestCase { public void testCreate_invalidFields() throws Exception { DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory(); - String[] fields = new String[] {"a", "abc"}; + String[] fields = new String[] { "a", "abc" }; for (String field : fields) { Map config = new HashMap<>(); config.put("field", field); @@ -93,7 +93,7 @@ public class DotExpanderProcessorFactoryTests extends OpenSearchTestCase { assertThat(e.getMessage(), equalTo("[field] field does not contain a dot")); } - fields = new String[] {".a", "a.", "."}; + fields = new String[] { ".a", "a.", "." }; for (String field : fields) { Map config = new HashMap<>(); config.put("field", field); @@ -101,7 +101,7 @@ public class DotExpanderProcessorFactoryTests extends OpenSearchTestCase { assertThat(e.getMessage(), equalTo("[field] Field can't start or end with a dot")); } - fields = new String[] {"a..b", "a...b", "a.b..c", "abc.def..hij"}; + fields = new String[] { "a..b", "a...b", "a.b..c", "abc.def..hij" }; for (String field : fields) { Map config = new HashMap<>(); config.put("field", field); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorTests.java index 23c057799b8..cd912269a59 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DotExpanderProcessorTests.java @@ -100,8 +100,13 @@ public class DotExpanderProcessorTests extends OpenSearchTestCase { // so because foo is no branch field but a value field the `foo.bar` field can't be expanded // into [foo].[bar], so foo should be renamed first into `[foo].[bar]: IngestDocument document = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor("_tag", null, new TestTemplateService.MockTemplateScript.Factory("foo"), - new TestTemplateService.MockTemplateScript.Factory("foo.bar"), false); + Processor processor = new RenameProcessor( + "_tag", + null, + new TestTemplateService.MockTemplateScript.Factory("foo"), + new TestTemplateService.MockTemplateScript.Factory("foo.bar"), + false + ); processor.execute(document); processor = new DotExpanderProcessor("_tag", null, null, "foo.bar"); processor.execute(document); @@ -156,36 +161,35 @@ public class DotExpanderProcessorTests extends OpenSearchTestCase { assertThat(document.getFieldValue("field.foo.bar.baz", String.class), equalTo("value")); } - public void testEscapeFields_doNothingIfFieldNotInSourceDoc() throws Exception { - //asking to expand a (literal) field that is not present in the source document + // asking to expand a (literal) field that is not present in the source document Map source = new HashMap<>(); source.put("foo.bar", "baz1"); IngestDocument document = new IngestDocument(source, Collections.emptyMap()); - //abc.def does not exist in source, so don't mutate document + // abc.def does not exist in source, so don't mutate document DotExpanderProcessor processor = new DotExpanderProcessor("_tag", null, null, "abc.def"); processor.execute(document); - //hasField returns false since it requires the expanded form, which is not expanded since we did not ask for it to be + // hasField returns false since it requires the expanded form, which is not expanded since we did not ask for it to be assertFalse(document.hasField("foo.bar")); - 
//nothing has changed + // nothing has changed assertEquals(document.getSourceAndMetadata().get("foo.bar"), "baz1"); - //abc.def is not found anywhere + // abc.def is not found anywhere assertFalse(document.hasField("abc.def")); assertFalse(document.getSourceAndMetadata().containsKey("abc")); assertFalse(document.getSourceAndMetadata().containsKey("abc.def")); - //asking to expand a (literal) field that does not exist, but the nested field does exist + // asking to expand a (literal) field that does not exist, but the nested field does exist source = new HashMap<>(); Map inner = new HashMap<>(); inner.put("bar", "baz1"); source.put("foo", inner); document = new IngestDocument(source, Collections.emptyMap()); - //foo.bar, the literal value (as opposed to nested value) does not exist in source, so don't mutate document + // foo.bar, the literal value (as opposed to nested value) does not exist in source, so don't mutate document processor = new DotExpanderProcessor("_tag", null, null, "foo.bar"); processor.execute(document); - //hasField returns true because the nested/expanded form exists in the source document + // hasField returns true because the nested/expanded form exists in the source document assertTrue(document.hasField("foo.bar")); - //nothing changed + // nothing changed assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1)); assertThat(document.getFieldValue("foo.bar", String.class), equalTo("baz1")); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorFactoryTests.java index e3fcc712b5f..6ba7a063f54 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorFactoryTests.java @@ -67,7 +67,7 @@ public class FailProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[message] required property is missing")); } } @@ -77,8 +77,7 @@ public class FailProcessorFactoryTests extends OpenSearchTestCase { Map config = new HashMap<>(); config.put("message", "{{error}}"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, - null, config)); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorTests.java index ec45edd548a..e4fca7870fd 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FailProcessorTests.java @@ -45,8 +45,7 @@ public class FailProcessorTests extends OpenSearchTestCase { public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String message = 
randomAlphaOfLength(10); - Processor processor = new FailProcessor(randomAlphaOfLength(10), - null, new TestTemplateService.MockTemplateScript.Factory(message)); + Processor processor = new FailProcessor(randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory(message)); try { processor.execute(ingestDocument); fail("fail processor should throw an exception"); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorFactoryTests.java index 7268c5355c5..996e379e6e2 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorFactoryTests.java @@ -53,7 +53,7 @@ public class ForEachProcessorFactoryTests extends OpenSearchTestCase { private final Consumer genericExecutor = Runnable::run; public void testCreate() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> { }); + Processor processor = new TestProcessor(ingestDocument -> {}); Map registry = new HashMap<>(); registry.put("_name", (r, t, description, c) -> processor); ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService); @@ -69,7 +69,7 @@ public class ForEachProcessorFactoryTests extends OpenSearchTestCase { } public void testSetIgnoreMissing() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> { }); + Processor processor = new TestProcessor(ingestDocument -> {}); Map registry = new HashMap<>(); registry.put("_name", (r, t, description, c) -> processor); ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService); @@ -86,7 +86,7 @@ public class ForEachProcessorFactoryTests extends OpenSearchTestCase { } public void testCreateWithTooManyProcessorTypes() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> { }); + Processor processor = new TestProcessor(ingestDocument -> {}); Map registry = new HashMap<>(); registry.put("_first", (r, t, description, c) -> processor); registry.put("_second", (r, t, description, c) -> processor); @@ -107,13 +107,15 @@ public class ForEachProcessorFactoryTests extends OpenSearchTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("processor", Collections.singletonMap("_name", Collections.emptyMap())); - Exception expectedException = expectThrows(OpenSearchParseException.class, - () -> forEachFactory.create(Collections.emptyMap(), null, null, config)); + Exception expectedException = expectThrows( + OpenSearchParseException.class, + () -> forEachFactory.create(Collections.emptyMap(), null, null, config) + ); assertThat(expectedException.getMessage(), equalTo("No processor type exists with name [_name]")); } public void testCreateWithMissingField() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> { }); + Processor processor = new TestProcessor(ingestDocument -> {}); Map registry = new HashMap<>(); registry.put("_name", (r, t, description, c) -> processor); ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(scriptService); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java index 02452486d9e..f0c61700f4d 100644 --- 
a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ForEachProcessorTests.java @@ -62,13 +62,17 @@ public class ForEachProcessorTests extends OpenSearchTestCase { values.add("bar"); values.add("baz"); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values", values) ); - ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", new AsyncUpperCaseProcessor("_ingest._value"), - false); - processor.execute(ingestDocument, (result, e) -> { - }); + ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", new AsyncUpperCaseProcessor("_ingest._value"), false); + processor.execute(ingestDocument, (result, e) -> {}); assertBusy(() -> { @SuppressWarnings("unchecked") @@ -82,7 +86,13 @@ public class ForEachProcessorTests extends OpenSearchTestCase { public void testExecuteWithFailure() throws Exception { IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c")) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values", Arrays.asList("a", "b", "c")) ); TestProcessor testProcessor = new TestProcessor(id -> { @@ -92,7 +102,7 @@ public class ForEachProcessorTests extends OpenSearchTestCase { }); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", testProcessor, false); Exception[] exceptions = new Exception[1]; - processor.execute(ingestDocument, (result, e) -> {exceptions[0] = e;}); + processor.execute(ingestDocument, (result, e) -> { exceptions[0] = e; }); assertThat(exceptions[0].getMessage(), equalTo("failure")); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("a", "b", "c"))); @@ -107,7 +117,10 @@ public class ForEachProcessorTests extends OpenSearchTestCase { }); Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {}); processor = new ForEachProcessor( - "_tag", null, "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)), + "_tag", + null, + "values", + new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)), false ); processor.execute(ingestDocument, (result, e) -> {}); @@ -120,7 +133,13 @@ public class ForEachProcessorTests extends OpenSearchTestCase { values.add(new HashMap<>()); values.add(new HashMap<>()); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values", values) ); TestProcessor innerProcessor = new TestProcessor(id -> { @@ -154,9 +173,17 @@ public class ForEachProcessorTests extends OpenSearchTestCase { IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, document); ForEachProcessor processor = new ForEachProcessor( - "_tag", null, "values", new SetProcessor("_tag", - null, new TestTemplateService.MockTemplateScript.Factory("_ingest._value.new_field"), - (model) -> model.get("other")), false); + "_tag", + null, + "values", + new SetProcessor( + "_tag", + null, + new 
TestTemplateService.MockTemplateScript.Factory("_ingest._value.new_field"), + (model) -> model.get("other") + ), + false + ); processor.execute(ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getFieldValue("values.0.new_field", String.class), equalTo("value")); @@ -168,22 +195,22 @@ public class ForEachProcessorTests extends OpenSearchTestCase { public void testRandom() { Processor innerProcessor = new Processor() { - @Override - public IngestDocument execute(IngestDocument ingestDocument) { - String existingValue = ingestDocument.getFieldValue("_ingest._value", String.class); - ingestDocument.setFieldValue("_ingest._value", existingValue + "."); - return ingestDocument; - } + @Override + public IngestDocument execute(IngestDocument ingestDocument) { + String existingValue = ingestDocument.getFieldValue("_ingest._value", String.class); + ingestDocument.setFieldValue("_ingest._value", existingValue + "."); + return ingestDocument; + } - @Override - public String getType() { - return null; - } + @Override + public String getType() { + return null; + } - @Override - public String getTag() { - return null; - } + @Override + public String getTag() { + return null; + } @Override public String getDescription() { @@ -191,10 +218,16 @@ public class ForEachProcessorTests extends OpenSearchTestCase { } }; int numValues = randomIntBetween(1, 10000); - List values = IntStream.range(0, numValues).mapToObj(i->"").collect(Collectors.toList()); + List values = IntStream.range(0, numValues).mapToObj(i -> "").collect(Collectors.toList()); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values", values) ); ForEachProcessor processor = new ForEachProcessor("_tag", null, "values", innerProcessor, false); @@ -212,18 +245,32 @@ public class ForEachProcessorTests extends OpenSearchTestCase { values.add(1); values.add(null); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values", values) ); TemplateScript.Factory template = new TestTemplateService.MockTemplateScript.Factory("errors"); ForEachProcessor processor = new ForEachProcessor( - "_tag", null, "values", new CompoundProcessor(false, + "_tag", + null, + "values", + new CompoundProcessor( + false, org.opensearch.common.collect.List.of( - new UppercaseProcessor("_tag_upper", null, "_ingest._value", false, "_ingest._value")), + new UppercaseProcessor("_tag_upper", null, "_ingest._value", false, "_ingest._value") + ), org.opensearch.common.collect.List.of( - new AppendProcessor("_tag", null, template, (model) -> (Collections.singletonList("added")), true)) - ), false); + new AppendProcessor("_tag", null, template, (model) -> (Collections.singletonList("added")), true) + ) + ), + false + ); processor.execute(ingestDocument, (result, e) -> {}); List result = ingestDocument.getFieldValue("values", List.class); @@ -243,12 +290,11 @@ public class ForEachProcessorTests extends OpenSearchTestCase { Map source = new HashMap<>(); source.put("_value", "new_value"); source.put("values", values); - IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, source - ); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source); - 
TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", - doc.getFieldValue("_source._value", String.class))); + TestProcessor processor = new TestProcessor( + doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class)) + ); ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", null, "values", processor, false); forEachProcessor.execute(ingestDocument, (result, e) -> {}); @@ -275,15 +321,25 @@ public class ForEachProcessorTests extends OpenSearchTestCase { values.add(value); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.singletonMap("values1", values) + "_index", + "_type", + "_id", + null, + null, + null, + Collections.singletonMap("values1", values) ); TestProcessor testProcessor = new TestProcessor( - doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) + doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) ); ForEachProcessor processor = new ForEachProcessor( - "_tag", null, "values1", new ForEachProcessor("_tag", null, "_ingest._value.values2", testProcessor, false), - false); + "_tag", + null, + "values1", + new ForEachProcessor("_tag", null, "_ingest._value.values2", testProcessor, false), + false + ); processor.execute(ingestDocument, (result, e) -> {}); List result = ingestDocument.getFieldValue("values1.0.values2", List.class); @@ -296,9 +352,7 @@ public class ForEachProcessorTests extends OpenSearchTestCase { } public void testIgnoreMissing() throws Exception { - IngestDocument originalIngestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, Collections.emptyMap() - ); + IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); TestProcessor testProcessor = new TestProcessor(doc -> {}); ForEachProcessor processor = new ForEachProcessor("_tag", null, "_ingest._value", testProcessor, true); @@ -311,7 +365,7 @@ public class ForEachProcessorTests extends OpenSearchTestCase { Map source = Collections.singletonMap("field", Arrays.asList("a", "b")); IngestDocument originalIngestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, source); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - TestProcessor testProcessor = new TestProcessor(id->id.appendFieldValue("field", "a")); + TestProcessor testProcessor = new TestProcessor(id -> id.appendFieldValue("field", "a")); ForEachProcessor processor = new ForEachProcessor("_tag", null, "field", testProcessor, true); processor.execute(ingestDocument, (result, e) -> {}); assertThat(testProcessor.getInvokedCounter(), equalTo(2)); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorFactoryTests.java index 7de69a6e6d0..397ffeb4e64 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorFactoryTests.java @@ -129,7 +129,9 @@ public class GrokProcessorFactoryTests extends OpenSearchTestCase { config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!")); 
config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "[")); OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config)); - assertThat(e.getMessage(), - equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. premature end of char-class")); + assertThat( + e.getMessage(), + equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. premature end of char-class") + ); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorGetActionTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorGetActionTests.java index 5fba59819ad..5ccf85dd53f 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorGetActionTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorGetActionTests.java @@ -57,8 +57,7 @@ import static org.hamcrest.core.IsNull.nullValue; import static org.mockito.Mockito.mock; public class GrokProcessorGetActionTests extends OpenSearchTestCase { - private static final Map TEST_PATTERNS = - org.opensearch.common.collect.Map.of("PATTERN2", "foo2", "PATTERN1", "foo1"); + private static final Map TEST_PATTERNS = org.opensearch.common.collect.Map.of("PATTERN2", "foo2", "PATTERN1", "foo1"); public void testRequest() throws Exception { GrokProcessorGetAction.Request request = new GrokProcessorGetAction.Request(false); @@ -82,8 +81,11 @@ public class GrokProcessorGetActionTests extends OpenSearchTestCase { public void testResponseSorting() { List sortedKeys = new ArrayList<>(TEST_PATTERNS.keySet()); Collections.sort(sortedKeys); - GrokProcessorGetAction.TransportAction transportAction = - new GrokProcessorGetAction.TransportAction(mock(TransportService.class), mock(ActionFilters.class), TEST_PATTERNS); + GrokProcessorGetAction.TransportAction transportAction = new GrokProcessorGetAction.TransportAction( + mock(TransportService.class), + mock(ActionFilters.class), + TEST_PATTERNS + ); GrokProcessorGetAction.Response[] receivedResponse = new GrokProcessorGetAction.Response[1]; transportAction.doExecute(null, new GrokProcessorGetAction.Request(true), new ActionListener() { @Override diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorTests.java index a71b0e829a2..1ed6feeb714 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GrokProcessorTests.java @@ -45,15 +45,22 @@ import java.util.Map; import static org.opensearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; - public class GrokProcessorTests extends OpenSearchTestCase { public void testMatch() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "1"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + 
false, + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.getFieldValue("one", String.class), equalTo("1")); } @@ -62,8 +69,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "A"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.emptyMap(), - Collections.singletonList("(?(?i)A)"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.emptyMap(), + Collections.singletonList("(?(?i)A)"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.getFieldValue("a", String.class), equalTo("A")); } @@ -72,8 +87,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "23"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("Provided Grok expressions do not match field value: [23]")); } @@ -82,9 +105,19 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "23"); - Exception e = expectThrows(IllegalArgumentException.class, () -> new GrokProcessor(randomAlphaOfLength(10), - null, Collections.singletonMap("ONE", "1"), Collections.singletonList("%{NOTONE:not_one}"), fieldName, - false, false, MatcherWatchdog.noop())); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{NOTONE:not_one}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ) + ); assertThat(e.getMessage(), equalTo("Unable to find pattern [NOTONE] in Grok's pattern dictionary")); } @@ -93,8 +126,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { IngestDocument originalDoc = new IngestDocument(new HashMap<>(), new HashMap<>()); originalDoc.setFieldValue(fieldName, fieldName); IngestDocument doc = new IngestDocument(originalDoc); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.emptyMap(), - Collections.singletonList(fieldName), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.emptyMap(), + Collections.singletonList(fieldName), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc, equalTo(originalDoc)); } @@ -103,8 +144,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = 
RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, null); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot process it.")); } @@ -114,8 +163,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); originalIngestDocument.setFieldValue(fieldName, null); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, true, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + true, + MatcherWatchdog.noop() + ); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -124,8 +181,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } @@ -134,8 +199,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, true, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + true, + MatcherWatchdog.noop() + ); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } @@ -143,8 +216,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { public void 
testMissingField() { String fieldName = "foo.bar"; IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [foo] not present as part of path [foo.bar]")); } @@ -153,8 +234,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { String fieldName = "foo.bar"; IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, Collections.singletonMap("ONE", "1"), - Collections.singletonList("%{ONE:one}"), fieldName, false, true, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonMap("ONE", "1"), + Collections.singletonList("%{ONE:one}"), + fieldName, + false, + true, + MatcherWatchdog.noop() + ); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -167,8 +256,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, - Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), fieldName, false, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), + fieldName, + false, + false, + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(false)); assertThat(doc.getFieldValue("two", String.class), equalTo("2")); @@ -183,8 +280,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, - Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), fieldName, true, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), + fieldName, + true, + false, + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(false)); assertThat(doc.getFieldValue("two", String.class), equalTo("2")); @@ -198,8 +303,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { doc.setFieldValue(fieldName, "first1"); Map patternBank = new HashMap<>(); patternBank.put("ONE", "1"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, - Arrays.asList("%{ONE:one}"), fieldName, true, false, MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Arrays.asList("%{ONE:one}"), + fieldName, + true, + false, + 
MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(true)); assertThat(doc.getFieldValue("_ingest._grok_match_index", String.class), equalTo("0")); @@ -229,8 +342,16 @@ public class GrokProcessorTests extends OpenSearchTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, Arrays.asList("%{ONE:first}-%{TWO:second}", - "%{ONE:first}-%{THREE:second}"), fieldName, randomBoolean(), randomBoolean(), MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Arrays.asList("%{ONE:first}-%{TWO:second}", "%{ONE:first}-%{THREE:second}"), + fieldName, + randomBoolean(), + randomBoolean(), + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.getFieldValue("first", String.class), equalTo("1")); assertThat(doc.getFieldValue("second", String.class), equalTo("3")); @@ -242,23 +363,37 @@ public class GrokProcessorTests extends OpenSearchTestCase { doc.setFieldValue(fieldName, "12"); Map patternBank = new HashMap<>(); patternBank.put("ONETWO", "1|2"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, - Collections.singletonList("%{ONETWO:first}%{ONETWO:first}"), fieldName, randomBoolean(), randomBoolean(), - MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Collections.singletonList("%{ONETWO:first}%{ONETWO:first}"), + fieldName, + randomBoolean(), + randomBoolean(), + MatcherWatchdog.noop() + ); processor.execute(doc); assertThat(doc.getFieldValue("first", String.class), equalTo("1")); } - public void testUnmatchedNamesNotIncludedInDocument() throws Exception { + public void testUnmatchedNamesNotIncludedInDocument() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "3"); Map patternBank = new HashMap<>(); patternBank.put("ONETWO", "1|2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), null, patternBank, - Collections.singletonList("%{ONETWO:first}|%{THREE:second}"), fieldName, randomBoolean(), randomBoolean(), - MatcherWatchdog.noop()); + GrokProcessor processor = new GrokProcessor( + randomAlphaOfLength(10), + null, + patternBank, + Collections.singletonList("%{ONETWO:first}|%{THREE:second}"), + fieldName, + randomBoolean(), + randomBoolean(), + MatcherWatchdog.noop() + ); processor.execute(doc); assertFalse(doc.hasField("first")); assertThat(doc.getFieldValue("second", String.class), equalTo("3")); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GsubProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GsubProcessorFactoryTests.java index 73d1ebf4a71..8b726c9473b 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GsubProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/GsubProcessorFactoryTests.java @@ -69,7 +69,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), 
equalTo("[pattern] required property is missing")); } } @@ -82,7 +82,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[replacement] required property is missing")); } } @@ -96,7 +96,7 @@ public class GsubProcessorFactoryTests extends AbstractStringProcessorFactoryTes try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), containsString("[pattern] Invalid regex pattern. Unclosed character class")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JoinProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JoinProcessorTests.java index 45ee4f02e02..03015050b7b 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JoinProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JoinProcessorTests.java @@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.equalTo; public class JoinProcessorTests extends OpenSearchTestCase { - private static final String[] SEPARATORS = new String[]{"-", "_", "."}; + private static final String[] SEPARATORS = new String[] { "-", "_", "." }; public void testJoinStrings() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); @@ -96,7 +96,7 @@ public class JoinProcessorTests extends OpenSearchTestCase { Processor processor = new JoinProcessor(randomAlphaOfLength(10), null, fieldName, "-", fieldName); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.String] cannot be cast to [java.util.List]")); } } @@ -107,7 +107,7 @@ public class JoinProcessorTests extends OpenSearchTestCase { Processor processor = new JoinProcessor(randomAlphaOfLength(10), null, fieldName, "-", fieldName); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); } } @@ -117,7 +117,7 @@ public class JoinProcessorTests extends OpenSearchTestCase { Processor processor = new JoinProcessor(randomAlphaOfLength(10), null, "field", "-", "field"); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [field] is null, cannot join.")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorFactoryTests.java index a26e5117c83..5843f9afdbc 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorFactoryTests.java @@ -85,8 +85,10 @@ public class JsonProcessorFactoryTests extends OpenSearchTestCase { public void testCreateWithMissingField() throws Exception { Map config = new HashMap<>(); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = 
expectThrows(OpenSearchParseException.class, - () -> FACTORY.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> FACTORY.create(null, processorTag, null, config) + ); assertThat(exception.getMessage(), equalTo("[field] required property is missing")); } @@ -97,8 +99,10 @@ public class JsonProcessorFactoryTests extends OpenSearchTestCase { config.put("field", randomField); config.put("target_field", randomTargetField); config.put("add_to_root", true); - OpenSearchException exception = expectThrows(OpenSearchParseException.class, - () -> FACTORY.create(null, randomAlphaOfLength(10), null, config)); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> FACTORY.create(null, randomAlphaOfLength(10), null, config) + ); assertThat(exception.getMessage(), equalTo("[target_field] Cannot set a target field while also setting `add_to_root` to true")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorTests.java index 285046ec457..9aad2f9c7ed 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/JsonProcessorTests.java @@ -77,8 +77,12 @@ public class JsonProcessorTests extends OpenSearchTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); - assertThat(exception.getCause().getMessage(), containsString("Unrecognized token 'blah': " + - "was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')")); + assertThat( + exception.getCause().getMessage(), + containsString( + "Unrecognized token 'blah': " + "was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')" + ) + ); } public void testByteArray() { @@ -90,9 +94,7 @@ public class JsonProcessorTests extends OpenSearchTestCase { Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); assertThat( exception.getCause().getMessage(), - containsString( - "Unrecognized token 'B': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')" - ) + containsString("Unrecognized token 'B': was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')") ); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java index c7d41ddba49..62060a682c0 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorFactoryTests.java @@ -91,8 +91,10 @@ public class KeyValueProcessorFactoryTests extends OpenSearchTestCase { KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); Map config = new HashMap<>(); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchParseException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> 
factory.create(null, processorTag, null, config) + ); assertThat(exception.getMessage(), equalTo("[field] required property is missing")); } @@ -101,8 +103,10 @@ public class KeyValueProcessorFactoryTests extends OpenSearchTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchParseException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> factory.create(null, processorTag, null, config) + ); assertThat(exception.getMessage(), equalTo("[field_split] required property is missing")); } @@ -112,8 +116,10 @@ public class KeyValueProcessorFactoryTests extends OpenSearchTestCase { config.put("field", "field1"); config.put("field_split", "&"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchParseException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> factory.create(null, processorTag, null, config) + ); assertThat(exception.getMessage(), equalTo("[value_split] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java index 748b590e97d..685a78e2e76 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/KeyValueProcessorTests.java @@ -65,7 +65,7 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testRootTarget() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe"); - Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("myField", "&", "=", null, null, null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello")); assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe"))); @@ -74,7 +74,7 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testKeySameAsSourceField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("first", "first=hello"); - Processor processor = createKvProcessor("first", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("first", "&", "=", null, null, null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello"))); } @@ -82,8 +82,7 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testIncludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = createKvProcessor(fieldName, "&", "=", - Sets.newHashSet("first"), null, "target", false); + Processor 
processor = createKvProcessor(fieldName, "&", "=", Sets.newHashSet("first"), null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); assertFalse(ingestDocument.hasField("target.second")); @@ -92,8 +91,7 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testExcludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = createKvProcessor(fieldName, "&", "=", - null, Sets.newHashSet("second"), "target", false); + Processor processor = createKvProcessor(fieldName, "&", "=", null, Sets.newHashSet("second"), "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); assertFalse(ingestDocument.hasField("target.second")); @@ -101,10 +99,20 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testIncludeAndExcludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, - "first=hello&second=world&second=universe&third=bar"); - Processor processor = createKvProcessor(fieldName, "&", "=", - Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false); + String fieldName = RandomDocumentPicks.addRandomField( + random(), + ingestDocument, + "first=hello&second=world&second=universe&third=bar" + ); + Processor processor = createKvProcessor( + fieldName, + "&", + "=", + Sets.newHashSet("first", "second"), + Sets.newHashSet("first", "second"), + "target", + false + ); processor.execute(ingestDocument); assertFalse(ingestDocument.hasField("target.first")); assertFalse(ingestDocument.hasField("target.second")); @@ -113,16 +121,17 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testMissingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); - Processor processor = createKvProcessor("unknown", "&", - "=", null, null, "target", false); + Processor processor = createKvProcessor("unknown", "&", "=", null, null, "target", false); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]")); } public void testNullValueWithIgnoreMissing() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap(fieldName, null)); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap(fieldName, null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true); processor.execute(ingestDocument); @@ -164,9 +173,11 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testTrimMultiCharSequence() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String fieldName = 
RandomDocumentPicks.addRandomField(random(), ingestDocument, - "to=, orig_to=, %+relay=mail.example.com[private/dovecot-lmtp]," + - " delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent " + String fieldName = RandomDocumentPicks.addRandomField( + random(), + ingestDocument, + "to=, orig_to=, %+relay=mail.example.com[private/dovecot-lmtp]," + + " delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent " ); Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null); processor.execute(ingestDocument); @@ -182,7 +193,9 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { public void testStripBrackets() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField( - random(), ingestDocument, "first=&second=\"world\"&second=(universe)&third=&fourth=[bar]&fifth='last'" + random(), + ingestDocument, + "first=&second=\"world\"&second=(universe)&third=&fourth=[bar]&fifth='last'" ); Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null); processor.execute(ingestDocument); @@ -202,18 +215,43 @@ public class KeyValueProcessorTests extends OpenSearchTestCase { assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe"))); } - private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, - Set excludeKeys, String targetField, - boolean ignoreMissing) throws Exception { + private static KeyValueProcessor createKvProcessor( + String field, + String fieldSplit, + String valueSplit, + Set includeKeys, + Set excludeKeys, + String targetField, + boolean ignoreMissing + ) throws Exception { return createKvProcessor( - field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null + field, + fieldSplit, + valueSplit, + includeKeys, + excludeKeys, + targetField, + ignoreMissing, + null, + null, + false, + null ); } - private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, - Set excludeKeys, String targetField, boolean ignoreMissing, - String trimKey, String trimValue, boolean stripBrackets, - String prefix) throws Exception { + private static KeyValueProcessor createKvProcessor( + String field, + String fieldSplit, + String valueSplit, + Set includeKeys, + Set excludeKeys, + String targetField, + boolean ignoreMissing, + String trimKey, + String trimValue, + boolean stripBrackets, + String prefix + ) throws Exception { Map config = new HashMap<>(); config.put("field", field); config.put("field_split", fieldSplit); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java index 42c2cfe1b41..66ca888a0d3 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java @@ -70,9 +70,13 @@ public class RemoveProcessorFactoryTests extends OpenSearchTestCase { String processorTag = randomAlphaOfLength(10); RemoveProcessor removeProcessor = factory.create(null, processorTag, null, config); assertThat(removeProcessor.getTag(), equalTo(processorTag)); - 
assertThat(removeProcessor.getFields().stream() - .map(template -> template.newInstance(Collections.emptyMap()).execute()) - .collect(Collectors.toList()), equalTo(Arrays.asList("field1", "field2"))); + assertThat( + removeProcessor.getFields() + .stream() + .map(template -> template.newInstance(Collections.emptyMap()).execute()) + .collect(Collectors.toList()), + equalTo(Arrays.asList("field1", "field2")) + ); } public void testCreateMissingField() throws Exception { @@ -80,7 +84,7 @@ public class RemoveProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } @@ -90,8 +94,7 @@ public class RemoveProcessorFactoryTests extends OpenSearchTestCase { Map config = new HashMap<>(); config.put("field", "{{field1}}"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java index d8fcfad367b..cf652361571 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java @@ -50,8 +50,12 @@ public class RemoveProcessorTests extends OpenSearchTestCase { public void testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RemoveProcessor(randomAlphaOfLength(10), - null, Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)), false); + Processor processor = new RemoveProcessor( + randomAlphaOfLength(10), + null, + Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)), + false + ); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } @@ -66,7 +70,7 @@ public class RemoveProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("remove field should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorFactoryTests.java index 6016326fdb6..ec43be97689 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorFactoryTests.java @@ -83,7 +83,7 @@ public class RenameProcessorFactoryTests extends OpenSearchTestCase { try { 
factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } @@ -94,7 +94,7 @@ public class RenameProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java index 272aa23d69b..fc95693024c 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RenameProcessorTests.java @@ -94,7 +94,7 @@ public class RenameProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("[3] is out of bounds for array with length [2] as part of path [list.3]")); assertThat(actualList.size(), equalTo(2)); assertThat(actualList.get(0), equalTo("item2")); @@ -105,12 +105,11 @@ public class RenameProcessorTests extends OpenSearchTestCase { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, - RandomDocumentPicks.randomFieldName(random()), false); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [" + fieldName + "] doesn't exist")); } } @@ -119,8 +118,7 @@ public class RenameProcessorTests extends OpenSearchTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = createRenameProcessor(fieldName, - RandomDocumentPicks.randomFieldName(random()), true); + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -128,12 +126,15 @@ public class RenameProcessorTests extends OpenSearchTestCase { public void testRenameNewFieldAlreadyExists() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = createRenameProcessor(RandomDocumentPicks.randomExistingFieldName( - random(), ingestDocument), fieldName, false); + Processor processor = createRenameProcessor( + RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), + fieldName, + false + ); try { 
processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [" + fieldName + "] already exists")); } } @@ -171,8 +172,8 @@ public class RenameProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(UnsupportedOperationException e) { - //the set failed, the old field has not been removed + } catch (UnsupportedOperationException e) { + // the set failed, the old field has not been removed assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } @@ -196,7 +197,7 @@ public class RenameProcessorTests extends OpenSearchTestCase { processor.execute(ingestDocument); fail("processor execute should have failed"); } catch (UnsupportedOperationException e) { - //the set failed, the old field has not been removed + // the set failed, the old field has not been removed assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true)); assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false)); } @@ -211,10 +212,12 @@ public class RenameProcessorTests extends OpenSearchTestCase { assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar", String.class), equalTo("bar")); - Processor processor2 = createRenameProcessor( "foo.bar", "foo.bar.baz", false); + Processor processor2 = createRenameProcessor("foo.bar", "foo.bar.baz", false); processor2.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", - Collections.singletonMap("baz", "bar")))); + assertThat( + ingestDocument.getFieldValue("foo", Map.class), + equalTo(Collections.singletonMap("bar", Collections.singletonMap("baz", "bar"))) + ); assertThat(ingestDocument.getFieldValue("foo.bar", Map.class), equalTo(Collections.singletonMap("baz", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar.baz", String.class), equalTo("bar")); @@ -225,7 +228,12 @@ public class RenameProcessorTests extends OpenSearchTestCase { } private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing) { - return new RenameProcessor(randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory(field), - new TestTemplateService.MockTemplateScript.Factory(targetField), ignoreMissing); + return new RenameProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory(field), + new TestTemplateService.MockTemplateScript.Factory(targetField), + ignoreMissing + ); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorFactoryTests.java index ae9998aba04..cece704a39d 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorFactoryTests.java @@ -108,8 +108,10 @@ public class ScriptProcessorFactoryTests extends OpenSearchTestCase { configMap.put("source", "bar"); configMap.put("lang", "mockscript"); - XContentParseException exception = 
expectThrows(XContentParseException.class, - () -> factory.create(null, randomAlphaOfLength(10), null, configMap)); + XContentParseException exception = expectThrows( + XContentParseException.class, + () -> factory.create(null, randomAlphaOfLength(10), null, configMap) + ); assertThat(exception.getMessage(), containsString("[script] failed to parse field [source]")); } @@ -117,8 +119,10 @@ public class ScriptProcessorFactoryTests extends OpenSearchTestCase { Map configMap = new HashMap<>(); configMap.put("lang", "mockscript"); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> factory.create(null, randomAlphaOfLength(10), null, configMap)); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> factory.create(null, randomAlphaOfLength(10), null, configMap) + ); assertThat(exception.getMessage(), is("must specify either [source] for an inline script or [id] for a stored script")); } @@ -138,33 +142,40 @@ public class ScriptProcessorFactoryTests extends OpenSearchTestCase { public void testFactoryInvalidateWithInvalidCompiledScript() throws Exception { String randomType = randomFrom("source", "id"); ScriptService mockedScriptService = mock(ScriptService.class); - ScriptException thrownException = new ScriptException("compile-time exception", new RuntimeException(), - Collections.emptyList(), "script", "mockscript"); + ScriptException thrownException = new ScriptException( + "compile-time exception", + new RuntimeException(), + Collections.emptyList(), + "script", + "mockscript" + ); when(mockedScriptService.compile(any(), any())).thenThrow(thrownException); factory = new ScriptProcessor.Factory(mockedScriptService); Map configMap = new HashMap<>(); configMap.put(randomType, "my_script"); - OpenSearchException exception = expectThrows(OpenSearchException.class, - () -> factory.create(null, randomAlphaOfLength(10), null, configMap)); + OpenSearchException exception = expectThrows( + OpenSearchException.class, + () -> factory.create(null, randomAlphaOfLength(10), null, configMap) + ); assertThat(exception.getMessage(), is("compile-time exception")); } public void testInlineIsCompiled() throws Exception { String scriptName = "foo"; - ScriptService scriptService = new ScriptService(Settings.builder().build(), + ScriptService scriptService = new ScriptService( + Settings.builder().build(), Collections.singletonMap( - Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( - Script.DEFAULT_SCRIPT_LANG, - Collections.singletonMap(scriptName, ctx -> { - ctx.put("foo", "bar"); - return null; - }), - Collections.emptyMap() - ) - ), new HashMap<>(ScriptModule.CORE_CONTEXTS)); + Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> { + ctx.put("foo", "bar"); + return null; + }), Collections.emptyMap()) + ), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); factory = new ScriptProcessor.Factory(scriptService); Map configMap = new HashMap<>(); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java index 3bf1ca5afa8..1aa48984415 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java @@ -60,20 +60,16 @@ public class ScriptProcessorTests extends OpenSearchTestCase { @Before public void 
setupScripting() { String scriptName = "script"; - scriptService = new ScriptService(Settings.builder().build(), + scriptService = new ScriptService( + Settings.builder().build(), Collections.singletonMap( - Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( - Script.DEFAULT_SCRIPT_LANG, - Collections.singletonMap( - scriptName, ctx -> { - Integer bytesIn = (Integer) ctx.get("bytes_in"); - Integer bytesOut = (Integer) ctx.get("bytes_out"); - ctx.put("bytes_total", bytesIn + bytesOut); - return null; - } - ), - Collections.emptyMap() - ) + Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> { + Integer bytesIn = (Integer) ctx.get("bytes_in"); + Integer bytesOut = (Integer) ctx.get("bytes_out"); + ctx.put("bytes_total", bytesIn + bytesOut); + return null; + }), Collections.emptyMap()) ), new HashMap<>(ScriptModule.CORE_CONTEXTS) ); @@ -112,20 +108,16 @@ public class ScriptProcessorTests extends OpenSearchTestCase { public void testTypeDeprecation() throws Exception { String scriptName = "script"; - ScriptService scriptService = new ScriptService(Settings.builder().build(), - Collections.singletonMap( - Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( - Script.DEFAULT_SCRIPT_LANG, - Collections.singletonMap( - scriptName, ctx -> { - ctx.get("_type"); - return null; - } - ), - Collections.emptyMap() - ) - ), - new HashMap<>(ScriptModule.CORE_CONTEXTS) + ScriptService scriptService = new ScriptService( + Settings.builder().build(), + Collections.singletonMap( + Script.DEFAULT_SCRIPT_LANG, + new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, Collections.singletonMap(scriptName, ctx -> { + ctx.get("_type"); + return null; + }), Collections.emptyMap()) + ), + new HashMap<>(ScriptModule.CORE_CONTEXTS) ); Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorFactoryTests.java index 5290c35d6ca..bb0f319a61b 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorFactoryTests.java @@ -85,7 +85,7 @@ public class SetProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } @@ -96,7 +96,7 @@ public class SetProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } @@ -108,7 +108,7 @@ public class SetProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } @@ -119,8 +119,7 @@ public class SetProcessorFactoryTests extends 
OpenSearchTestCase { config.put("field", "{{field1}}"); config.put("value", "value1"); String processorTag = randomAlphaOfLength(10); - OpenSearchException exception = expectThrows(OpenSearchException.class, - () -> factory.create(null, processorTag, null, config)); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java index 1394126b480..0e7ba5556fb 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java @@ -59,7 +59,7 @@ public class SetProcessorTests extends OpenSearchTestCase { public void testSetNewFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - //used to verify that there are no conflicts between subsequent fields going to be added + // used to verify that there are no conflicts between subsequent fields going to be added IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); Object fieldValue = RandomDocumentPicks.randomFieldValue(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue); @@ -76,9 +76,11 @@ public class SetProcessorTests extends OpenSearchTestCase { try { processor.execute(ingestDocument); fail("processor execute should have failed"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("cannot set [inner] with parent object of type [java.lang.String] as " + - "part of path [field.inner]")); + } catch (IllegalArgumentException e) { + assertThat( + e.getMessage(), + equalTo("cannot set [inner] with parent object of type [java.lang.String] as " + "part of path [field.inner]") + ); } } @@ -154,7 +156,13 @@ public class SetProcessorTests extends OpenSearchTestCase { } private static Processor createSetProcessor(String fieldName, Object fieldValue, boolean overrideEnabled, boolean ignoreEmptyValue) { - return new SetProcessor(randomAlphaOfLength(10), null, new TestTemplateService.MockTemplateScript.Factory(fieldName), - ValueSource.wrap(fieldValue, TestTemplateService.instance()), overrideEnabled, ignoreEmptyValue); + return new SetProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory(fieldName), + ValueSource.wrap(fieldValue, TestTemplateService.instance()), + overrideEnabled, + ignoreEmptyValue + ); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorFactoryTests.java index 68bcf814d5c..d496e883aba 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorFactoryTests.java @@ -114,7 +114,7 @@ public class SortProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } 
catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorTests.java index ef44d897703..779cab8dc35 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SortProcessorTests.java @@ -75,7 +75,7 @@ public class SortProcessorTests extends OpenSearchTestCase { public void testSortIntegersNonRandom() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - Integer[] expectedResult = new Integer[]{1,2,3,4,5,10,20,21,22,50,100}; + Integer[] expectedResult = new Integer[] { 1, 2, 3, 4, 5, 10, 20, 21, 22, 50, 100 }; List fieldValue = new ArrayList<>(expectedResult.length); fieldValue.addAll(Arrays.asList(expectedResult).subList(0, expectedResult.length)); Collections.shuffle(fieldValue, random()); @@ -260,7 +260,7 @@ public class SortProcessorTests extends OpenSearchTestCase { Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, order, fieldName); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.String] cannot be cast to [java.util.List]")); } } @@ -272,7 +272,7 @@ public class SortProcessorTests extends OpenSearchTestCase { Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, order, fieldName); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); } } @@ -283,7 +283,7 @@ public class SortProcessorTests extends OpenSearchTestCase { Processor processor = new SortProcessor(randomAlphaOfLength(10), null, "field", order, "field"); try { processor.execute(ingestDocument); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("field [field] is null, cannot sort.")); } } @@ -303,8 +303,7 @@ public class SortProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); String targetFieldName = fieldName + "foo"; - Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, - SortOrder.DESCENDING, targetFieldName); + Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, SortOrder.DESCENDING, targetFieldName); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(targetFieldName, List.class), expectedResult); } @@ -324,8 +323,7 @@ public class SortProcessorTests extends OpenSearchTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); String targetFieldName = fieldName + "foo"; - Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, - SortOrder.ASCENDING, targetFieldName); + Processor processor = new SortProcessor(randomAlphaOfLength(10), null, fieldName, SortOrder.ASCENDING, targetFieldName); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(targetFieldName, List.class), expectedResult); } 
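Note on the convention applied by these hunks: every change so far is mechanical formatting, namely a space after catch, spaces inside array-initializer braces, and argument lists that exceed the line limit wrapped one argument per line with the closing parenthesis on its own line. The sketch below is only an illustration of that style on a self-contained example; the class and method names are hypothetical and do not appear in the OpenSearch sources.

import java.util.Arrays;
import java.util.List;

public class FormattingConventionSketch {

    // Long parameter lists are wrapped one entry per line, with the
    // closing parenthesis on its own line.
    static String join(
        String tag,
        String description,
        List<String> values,
        String separator,
        boolean ignoreMissing
    ) {
        if (values == null || values.isEmpty()) {
            return ignoreMissing ? "" : tag + ": " + description;
        }
        return String.join(separator, values);
    }

    public static void main(String[] args) {
        // Array initializers keep a space inside the braces.
        String[] separators = new String[] { "-", "_", "." };

        try {
            // Long call sites are wrapped the same way as declarations.
            System.out.println(
                join(
                    "demo",
                    "joins a few values",
                    Arrays.asList("a", "b", "c"),
                    separators[0],
                    false
                )
            );
        } catch (IllegalArgumentException e) { // space between 'catch' and '('
            System.err.println(e.getMessage());
        }
    }
}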
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorFactoryTests.java index 0df7e445120..59e0b2066d6 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorFactoryTests.java @@ -63,7 +63,7 @@ public class SplitProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } @@ -75,7 +75,7 @@ public class SplitProcessorFactoryTests extends OpenSearchTestCase { try { factory.create(null, null, null, config); fail("factory create should have failed"); - } catch(OpenSearchParseException e) { + } catch (OpenSearchParseException e) { assertThat(e.getMessage(), equalTo("[separator] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorTests.java index d4b516a5440..196fa2cb293 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SplitProcessorTests.java @@ -70,8 +70,7 @@ public class SplitProcessorTests extends OpenSearchTestCase { } public void testSplitNullValue() throws Exception { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("field", null)); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); Processor processor = new SplitProcessor(randomAlphaOfLength(10), null, "field", "\\.", false, false, "field"); try { processor.execute(ingestDocument); @@ -83,8 +82,10 @@ public class SplitProcessorTests extends OpenSearchTestCase { public void testSplitNullValueWithIgnoreMissing() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap(fieldName, null)); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap(fieldName, null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Processor processor = new SplitProcessor(randomAlphaOfLength(10), null, fieldName, "\\.", true, false, fieldName); processor.execute(ingestDocument); @@ -108,8 +109,10 @@ public class SplitProcessorTests extends OpenSearchTestCase { processor.execute(ingestDocument); fail("split processor should have failed"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast " + - "to [java.lang.String]")); + assertThat( + e.getMessage(), + equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast " + "to [java.lang.String]") + ); } } @@ -123,11 +126,13 @@ public class SplitProcessorTests extends OpenSearchTestCase { IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>()); splitProcessor.execute(ingestDocument); @SuppressWarnings("unchecked") 
- List flags = (List)ingestDocument.getFieldValue("flags", List.class); + List flags = (List) ingestDocument.getFieldValue("flags", List.class); assertThat(flags, equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting"))); ingestDocument.appendFieldValue("flags", "additional_flag"); - assertThat(ingestDocument.getFieldValue("flags", List.class), equalTo(Arrays.asList("new", "hot", "super", - "fun", "interesting", "additional_flag"))); + assertThat( + ingestDocument.getFieldValue("flags", List.class), + equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting", "additional_flag")) + ); } public void testSplitWithTargetField() throws Exception { diff --git a/modules/ingest-common/src/yamlRestTest/java/org/opensearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java b/modules/ingest-common/src/yamlRestTest/java/org/opensearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java index 7bc8e79f545..de27ed9ce9c 100644 --- a/modules/ingest-common/src/yamlRestTest/java/org/opensearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java +++ b/modules/ingest-common/src/yamlRestTest/java/org/opensearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java @@ -49,4 +49,3 @@ public class IngestCommonClientYamlTestSuiteIT extends OpenSearchClientYamlSuite return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java index 746c29fa478..2ef5d8da000 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/opensearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java @@ -84,22 +84,25 @@ public class GeoIpProcessorNonIngestNodeIT extends OpenSearchIntegTestCase { try { Files.createDirectories(databasePath); Files.copy( - new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), - databasePath.resolve("GeoLite2-City.mmdb")); + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), + databasePath.resolve("GeoLite2-City.mmdb") + ); Files.copy( - new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), - databasePath.resolve("GeoLite2-Country.mmdb")); + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), + databasePath.resolve("GeoLite2-Country.mmdb") + ); Files.copy( - new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), - databasePath.resolve("GeoLite2-ASN.mmdb")); + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), + databasePath.resolve("GeoLite2-ASN.mmdb") + ); } catch (final IOException e) { throw new UncheckedIOException(e); } return Settings.builder() - .put("ingest.geoip.database_path", databasePath) - .put(nonIngestNode()) - .put(super.nodeSettings(nodeOrdinal)) - .build(); + .put("ingest.geoip.database_path", databasePath) + .put(nonIngestNode()) + .put(super.nodeSettings(nodeOrdinal)) + .build(); } /** @@ -173,13 +176,13 @@ public class GeoIpProcessorNonIngestNodeIT extends OpenSearchIntegTestCase { assertDatabaseLoadStatus(ingestNode, true); // the geo-IP database should still not be loaded on the non-ingest nodes Arrays.stream(internalCluster().getNodeNames()) - .filter(node -> node.equals(ingestNode) == false) 
- .forEach(node -> assertDatabaseLoadStatus(node, false)); + .filter(node -> node.equals(ingestNode) == false) + .forEach(node -> assertDatabaseLoadStatus(node, false)); } private void assertDatabaseLoadStatus(final String node, final boolean loaded) { final IngestService ingestService = internalCluster().getInstance(IngestService.class, node); - final GeoIpProcessor.Factory factory = (GeoIpProcessor.Factory)ingestService.getProcessorFactories().get("geoip"); + final GeoIpProcessor.Factory factory = (GeoIpProcessor.Factory) ingestService.getProcessorFactories().get("geoip"); for (final DatabaseReaderLazyLoader loader : factory.databaseReaders().values()) { if (loaded) { assertNotNull(loader.databaseReader.get()); diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/DatabaseReaderLazyLoader.java index 064590f1a68..1cafae0e50c 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -86,7 +86,7 @@ class DatabaseReaderLazyLoader implements Closeable { if (fileSize <= 512) { throw new IOException("unexpected file length [" + fileSize + "] for [" + databasePath + "]"); } - final int[] databaseTypeMarker = {'d', 'a', 't', 'a', 'b', 'a', 's', 'e', '_', 't', 'y', 'p', 'e'}; + final int[] databaseTypeMarker = { 'd', 'a', 't', 'a', 'b', 'a', 's', 'e', '_', 't', 'y', 'p', 'e' }; try (InputStream in = databaseInputStream()) { // read the last 512 bytes final long skipped = in.skip(fileSize - 512); diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java index 61045fe5d6a..384ae6f14dc 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java @@ -99,13 +99,15 @@ public final class GeoIpProcessor extends AbstractProcessor { */ GeoIpProcessor( final String tag, - String description, final String field, + String description, + final String field, final DatabaseReaderLazyLoader lazyLoader, final String targetField, final Set properties, final boolean ignoreMissing, final GeoIpCache cache, - boolean firstOnly) { + boolean firstOnly + ) { super(tag, description); this.field = field; this.targetField = targetField; @@ -186,8 +188,10 @@ public final class GeoIpProcessor extends AbstractProcessor { geoData = Collections.emptyMap(); } } else { - throw new OpenSearchParseException("Unsupported database type [" + lazyLoader.getDatabaseType() - + "]", new IllegalStateException()); + throw new OpenSearchParseException( + "Unsupported database type [" + lazyLoader.getDatabaseType() + "]", + new IllegalStateException() + ); } return geoData; } @@ -215,8 +219,8 @@ public final class GeoIpProcessor extends AbstractProcessor { private Map retrieveCityGeoData(InetAddress ipAddress) { SpecialPermission.check(); - CityResponse response = AccessController.doPrivileged((PrivilegedAction) () -> - cache.putIfAbsent(ipAddress, CityResponse.class, ip -> { + CityResponse response = AccessController.doPrivileged( + (PrivilegedAction) () -> cache.putIfAbsent(ipAddress, CityResponse.class, ip -> { try { return lazyLoader.get().city(ip); } catch (AddressNotFoundException e) { @@ -224,7 +228,8 @@ public final class 
GeoIpProcessor extends AbstractProcessor { } catch (Exception e) { throw new RuntimeException(e); } - })); + }) + ); Country country = response.getCountry(); City city = response.getCity(); @@ -301,8 +306,8 @@ public final class GeoIpProcessor extends AbstractProcessor { private Map retrieveCountryGeoData(InetAddress ipAddress) { SpecialPermission.check(); - CountryResponse response = AccessController.doPrivileged((PrivilegedAction) () -> - cache.putIfAbsent(ipAddress, CountryResponse.class, ip -> { + CountryResponse response = AccessController.doPrivileged( + (PrivilegedAction) () -> cache.putIfAbsent(ipAddress, CountryResponse.class, ip -> { try { return lazyLoader.get().country(ip); } catch (AddressNotFoundException e) { @@ -310,7 +315,8 @@ public final class GeoIpProcessor extends AbstractProcessor { } catch (Exception e) { throw new RuntimeException(e); } - })); + }) + ); Country country = response.getCountry(); Continent continent = response.getContinent(); @@ -346,8 +352,8 @@ public final class GeoIpProcessor extends AbstractProcessor { private Map retrieveAsnGeoData(InetAddress ipAddress) { SpecialPermission.check(); - AsnResponse response = AccessController.doPrivileged((PrivilegedAction) () -> - cache.putIfAbsent(ipAddress, AsnResponse.class, ip -> { + AsnResponse response = AccessController.doPrivileged( + (PrivilegedAction) () -> cache.putIfAbsent(ipAddress, AsnResponse.class, ip -> { try { return lazyLoader.get().asn(ip); } catch (AddressNotFoundException e) { @@ -355,7 +361,8 @@ public final class GeoIpProcessor extends AbstractProcessor { } catch (Exception e) { throw new RuntimeException(e); } - })); + }) + ); Integer asn = response.getAutonomousSystemNumber(); String organization_name = response.getAutonomousSystemOrganization(); @@ -388,16 +395,23 @@ public final class GeoIpProcessor extends AbstractProcessor { } public static final class Factory implements Processor.Factory { - static final Set DEFAULT_CITY_PROPERTIES = Collections.unmodifiableSet(EnumSet.of( - Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE, - Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION - )); - static final Set DEFAULT_COUNTRY_PROPERTIES = Collections.unmodifiableSet(EnumSet.of( - Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE - )); - static final Set DEFAULT_ASN_PROPERTIES = Collections.unmodifiableSet(EnumSet.of( - Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK - )); + static final Set DEFAULT_CITY_PROPERTIES = Collections.unmodifiableSet( + EnumSet.of( + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.LOCATION + ) + ); + static final Set DEFAULT_COUNTRY_PROPERTIES = Collections.unmodifiableSet( + EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE) + ); + static final Set DEFAULT_ASN_PROPERTIES = Collections.unmodifiableSet( + EnumSet.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK) + ); private final Map databaseReaders; @@ -414,9 +428,11 @@ public final class GeoIpProcessor extends AbstractProcessor { @Override public GeoIpProcessor create( - final Map registry, - final String processorTag, - final String description, final Map config) throws IOException { + final Map registry, + final String processorTag, + final String description, + final Map config + ) throws IOException { String ipField = 
readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb"); @@ -426,8 +442,7 @@ public final class GeoIpProcessor extends AbstractProcessor { DatabaseReaderLazyLoader lazyLoader = databaseReaders.get(databaseFile); if (lazyLoader == null) { - throw newConfigurationException(TYPE, processorTag, - "database_file", "database file [" + databaseFile + "] doesn't exist"); + throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist"); } final String databaseType = lazyLoader.getDatabaseType(); @@ -451,20 +466,33 @@ public final class GeoIpProcessor extends AbstractProcessor { } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { properties = DEFAULT_ASN_PROPERTIES; } else { - throw newConfigurationException(TYPE, processorTag, "database_file", "Unsupported database type [" - + databaseType + "]"); + throw newConfigurationException( + TYPE, + processorTag, + "database_file", + "Unsupported database type [" + databaseType + "]" + ); } } - return new GeoIpProcessor(processorTag, description, ipField, lazyLoader, targetField, properties, ignoreMissing, cache, - firstOnly); + return new GeoIpProcessor( + processorTag, + description, + ipField, + lazyLoader, + targetField, + properties, + ignoreMissing, + cache, + firstOnly + ); } } // Geoip2's AddressNotFoundException is checked and due to the fact that we need run their code // inside a PrivilegedAction code block, we are forced to catch any checked exception and rethrow // it with an unchecked exception. - //package private for testing + // package private for testing static final class AddressNotFoundRuntimeException extends RuntimeException { AddressNotFoundRuntimeException(Throwable cause) { @@ -488,15 +516,27 @@ public final class GeoIpProcessor extends AbstractProcessor { NETWORK; static final EnumSet ALL_CITY_PROPERTIES = EnumSet.of( - Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME, - Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, + Property.IP, + Property.COUNTRY_ISO_CODE, + Property.COUNTRY_NAME, + Property.CONTINENT_NAME, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.TIMEZONE, Property.LOCATION ); static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of( - Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE + Property.IP, + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE ); static final EnumSet ALL_ASN_PROPERTIES = EnumSet.of( - Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK + Property.IP, + Property.ASN, + Property.ORGANIZATION_NAME, + Property.NETWORK ); public static Property parseProperty(String databaseType, String value) { @@ -516,8 +556,9 @@ public final class GeoIpProcessor extends AbstractProcessor { } return property; } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("illegal property value [" + value + "]. valid values are " + - Arrays.toString(validProperties.toArray())); + throw new IllegalArgumentException( + "illegal property value [" + value + "]. 
valid values are " + Arrays.toString(validProperties.toArray()) + ); } } } diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java index 4780edc19d8..6af408c1853 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpPlugin.java @@ -65,10 +65,9 @@ import java.util.function.Function; import java.util.stream.Stream; public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable { - public static final Setting CACHE_SIZE = - Setting.longSetting("ingest.geoip.cache_size", 1000, 0, Setting.Property.NodeScope); + public static final Setting CACHE_SIZE = Setting.longSetting("ingest.geoip.cache_size", 1000, 0, Setting.Property.NodeScope); - static String[] DEFAULT_DATABASE_FILENAMES = new String[]{"GeoLite2-ASN.mmdb", "GeoLite2-City.mmdb", "GeoLite2-Country.mmdb"}; + static String[] DEFAULT_DATABASE_FILENAMES = new String[] { "GeoLite2-ASN.mmdb", "GeoLite2-City.mmdb", "GeoLite2-Country.mmdb" }; private Map databaseReaders; @@ -143,17 +142,15 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable } private static DatabaseReaderLazyLoader createLoader(Path databasePath, boolean loadDatabaseOnHeap) { - return new DatabaseReaderLazyLoader( - databasePath, - () -> { - DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance()); - if (loadDatabaseOnHeap) { - builder.fileMode(Reader.FileMode.MEMORY); - } else { - builder.fileMode(Reader.FileMode.MEMORY_MAPPED); - } - return builder.build(); - }); + return new DatabaseReaderLazyLoader(databasePath, () -> { + DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance()); + if (loadDatabaseOnHeap) { + builder.fileMode(Reader.FileMode.MEMORY); + } else { + builder.fileMode(Reader.FileMode.MEMORY_MAPPED); + } + return builder.build(); + }); } private static void assertDatabaseExistence(final Path path, final boolean exists) throws IOException { @@ -186,7 +183,7 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable static class GeoIpCache { private final Cache, AbstractResponse> cache; - //package private for testing + // package private for testing GeoIpCache(long maxSize) { if (maxSize < 0) { throw new IllegalArgumentException("geoip max cache size must be 0 or greater"); @@ -194,12 +191,15 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable this.cache = CacheBuilder., AbstractResponse>builder().setMaximumWeight(maxSize).build(); } - T putIfAbsent(InetAddress ip, Class responseType, - Function retrieveFunction) { + T putIfAbsent( + InetAddress ip, + Class responseType, + Function retrieveFunction + ) { - //can't use cache.computeIfAbsent due to the elevated permissions for the jackson (run via the cache loader) + // can't use cache.computeIfAbsent due to the elevated permissions for the jackson (run via the cache loader) CacheKey cacheKey = new CacheKey<>(ip, responseType); - //intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. + // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
AbstractResponse response = cache.get(cacheKey); if (response == null) { response = retrieveFunction.apply(ip); @@ -208,19 +208,19 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable return responseType.cast(response); } - //only useful for testing + // only useful for testing T get(InetAddress ip, Class responseType) { CacheKey cacheKey = new CacheKey<>(ip, responseType); return responseType.cast(cache.get(cacheKey)); } - /** - * The key to use for the cache. Since this cache can span multiple geoip processors that all use different databases, the response - * type is needed to be included in the cache key. For example, if we only used the IP address as the key the City and ASN the same - * IP may be in both with different values and we need to cache both. The response type scopes the IP to the correct database - * provides a means to safely cast the return objects. - * @param The AbstractResponse type used to scope the key and cast the result. - */ + /** + * The key to use for the cache. Since this cache can span multiple geoip processors that all use different databases, the response + * type is needed to be included in the cache key. For example, if we only used the IP address as the key the City and ASN the same + * IP may be in both with different values and we need to cache both. The response type scopes the IP to the correct database + * provides a means to safely cast the return objects. + * @param The AbstractResponse type used to scope the key and cast the result. + */ private static class CacheKey { private final InetAddress ip; @@ -231,17 +231,16 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable this.responseType = responseType; } - //generated + // generated @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CacheKey cacheKey = (CacheKey) o; - return Objects.equals(ip, cacheKey.ip) && - Objects.equals(responseType, cacheKey.responseType); + return Objects.equals(ip, cacheKey.ip) && Objects.equals(responseType, cacheKey.responseType); } - //generated + // generated @Override public int hashCode() { return Objects.hash(ip, responseType); diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java index ff4ddbb8e7e..15ca93e0fba 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -187,8 +187,14 @@ public class GeoIpProcessorFactoryTests extends OpenSearchTestCase { String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); config.put("properties", Collections.singletonList(asnProperty)); Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config)); - assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + asnProperty + - "]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME]")); + assertThat( + e.getMessage(), + equalTo( + "[properties] illegal property value [" + + asnProperty + + "]. 
valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME]" + ) + ); } public void testBuildWithAsnDbAndCityFields() throws Exception { @@ -201,8 +207,10 @@ public class GeoIpProcessorFactoryTests extends OpenSearchTestCase { String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", Collections.singletonList(cityProperty)); Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config)); - assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty + - "]. valid values are [IP, ASN, ORGANIZATION_NAME, NETWORK]")); + assertThat( + e.getMessage(), + equalTo("[properties] illegal property value [" + cityProperty + "]. valid values are [IP, ASN, ORGANIZATION_NAME, NETWORK]") + ); } public void testBuildNonExistingDbFile() throws Exception { @@ -246,8 +254,13 @@ public class GeoIpProcessorFactoryTests extends OpenSearchTestCase { config1.put("field", "_field"); config1.put("properties", Collections.singletonList("invalid")); Exception e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config1)); - assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + - "COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); + assertThat( + e.getMessage(), + equalTo( + "[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + + "COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]" + ) + ); Map config2 = new HashMap<>(); config2.put("field", "_field"); @@ -354,8 +367,7 @@ public class GeoIpProcessorFactoryTests extends OpenSearchTestCase { copyDatabaseFiles(geoIpDir); final String databaseFilename = randomFrom(IngestGeoIpPlugin.DEFAULT_DATABASE_FILENAMES); Files.delete(geoIpDir.resolve(databaseFilename)); - final IOException e = - expectThrows(IOException.class, () -> IngestGeoIpPlugin.loadDatabaseReaders(geoIpDir, geoIpConfigDir)); + final IOException e = expectThrows(IOException.class, () -> IngestGeoIpPlugin.loadDatabaseReaders(geoIpDir, geoIpConfigDir)); assertThat(e, hasToString(containsString("expected database [" + databaseFilename + "] to exist in [" + geoIpDir + "]"))); } @@ -367,15 +379,12 @@ public class GeoIpProcessorFactoryTests extends OpenSearchTestCase { copyDatabaseFiles(geoIpDir); final String databaseFilename = randomFrom(IngestGeoIpPlugin.DEFAULT_DATABASE_FILENAMES); copyDatabaseFile(geoIpConfigDir, databaseFilename); - final IOException e = - expectThrows(IOException.class, () -> IngestGeoIpPlugin.loadDatabaseReaders(geoIpDir, geoIpConfigDir)); + final IOException e = expectThrows(IOException.class, () -> IngestGeoIpPlugin.loadDatabaseReaders(geoIpDir, geoIpConfigDir)); assertThat(e, hasToString(containsString("expected database [" + databaseFilename + "] to not exist in [" + geoIpConfigDir + "]"))); } private static void copyDatabaseFile(final Path path, final String databaseFilename) throws IOException { - Files.copy( - new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/" + databaseFilename)), - path.resolve(databaseFilename)); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/" + databaseFilename)), path.resolve(databaseFilename)); } private static void copyDatabaseFiles(final Path path) throws IOException { diff --git 
a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java index 54142fa98d9..f06802af8b5 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java @@ -59,9 +59,17 @@ import static org.hamcrest.Matchers.nullValue; public class GeoIpProcessorTests extends OpenSearchTestCase { public void testCity() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "8.8.8.8"); @@ -84,20 +92,38 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testNullValueWithIgnoreMissing() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, - new GeoIpCache(1000), false); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("source_field", null)); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + true, + new GeoIpCache(1000), + false + ); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap("source_field", null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } public void testNonExistentWithIgnoreMissing() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + true, + new GeoIpCache(1000), + false + ); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); @@ -105,20 +131,38 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testNullWithoutIgnoreMissing() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("source_field", null)); + GeoIpProcessor processor = new GeoIpProcessor( + 
randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap("source_field", null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot extract geoip information.")); } public void testNonExistentWithoutIgnoreMissing() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); @@ -126,9 +170,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testCity_withIpV6() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); String address = "2602:306:33d3:8000::3257:9652"; Map document = new HashMap<>(); @@ -155,9 +207,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testCityWithMissingLocation() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "80.231.5.0"); @@ -172,9 +232,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testCountry() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-Country.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-Country.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -192,9 
+260,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testCountryWithMissingLocation() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-Country.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-Country.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "80.231.5.0"); @@ -210,9 +286,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { public void testAsn() throws Exception { String ip = "82.171.64.0"; - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-ASN.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-ASN.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", ip); @@ -230,9 +314,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testAddressIsNotInTheDatabase() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "127.0.0.1"); @@ -243,9 +335,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { /** Don't silently do DNS lookups or anything trappy on bogus data */ public void testInvalid() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", "www.google.com"); @@ -255,9 +355,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testListAllValid() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", Arrays.asList("8.8.8.8", "82.171.64.0")); 
@@ -276,9 +384,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testListPartiallyValid() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1")); @@ -297,9 +413,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testListNoMatches() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), false); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + false + ); Map document = new HashMap<>(); document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.1")); @@ -310,9 +434,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testListFirstOnly() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), true); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + true + ); Map document = new HashMap<>(); document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1")); @@ -329,9 +461,17 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { } public void testListFirstOnlyNoMatches() throws Exception { - GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", - loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, - new GeoIpCache(1000), true); + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoLite2-City.mmdb"), + "target_field", + EnumSet.allOf(GeoIpProcessor.Property.class), + false, + new GeoIpCache(1000), + true + ); Map document = new HashMap<>(); document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.2")); @@ -343,8 +483,8 @@ public class GeoIpProcessorTests extends OpenSearchTestCase { private DatabaseReaderLazyLoader loader(final String path) { final Supplier databaseInputStreamSupplier = () -> GeoIpProcessor.class.getResourceAsStream(path); - final CheckedSupplier loader = - () -> new DatabaseReader.Builder(databaseInputStreamSupplier.get()).build(); + final CheckedSupplier loader = () -> new DatabaseReader.Builder(databaseInputStreamSupplier.get()) + .build(); return new DatabaseReaderLazyLoader(PathUtils.get(path), loader) { @Override diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpPluginTests.java 
b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpPluginTests.java index 69ae929cb58..540d68b0982 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpPluginTests.java +++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpPluginTests.java @@ -46,13 +46,12 @@ public class IngestGeoIpPluginTests extends OpenSearchTestCase { AbstractResponse response1 = mock(AbstractResponse.class); AbstractResponse response2 = mock(AbstractResponse.class); - //add a key + // add a key AbstractResponse cachedResponse = cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class, ip -> response1); assertSame(cachedResponse, response1); assertSame(cachedResponse, cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class, ip -> response1)); assertSame(cachedResponse, cache.get(InetAddresses.forString("127.0.0.1"), AbstractResponse.class)); - // evict old key by adding another value cachedResponse = cache.putIfAbsent(InetAddresses.forString("127.0.0.2"), AbstractResponse.class, ip -> response2); assertSame(cachedResponse, response2); @@ -64,14 +63,19 @@ public class IngestGeoIpPluginTests extends OpenSearchTestCase { public void testThrowsFunctionsException() { GeoIpCache cache = new GeoIpCache(1); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class, - ip -> { throw new IllegalArgumentException("bad"); })); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> cache.putIfAbsent( + InetAddresses.forString("127.0.0.1"), + AbstractResponse.class, + ip -> { throw new IllegalArgumentException("bad"); } + ) + ); assertEquals("bad", ex.getMessage()); } public void testInvalidInit() { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new GeoIpCache(-1)); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new GeoIpCache(-1)); assertEquals("geoip max cache size must be 0 or greater", ex.getMessage()); } } diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java index 5090b9a4d40..ee424ad1322 100644 --- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentPlugin.java @@ -51,8 +51,12 @@ import java.util.stream.Stream; public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { - private final Setting CACHE_SIZE_SETTING = Setting.longSetting("ingest.user_agent.cache_size", 1000, 0, - Setting.Property.NodeScope); + private final Setting CACHE_SIZE_SETTING = Setting.longSetting( + "ingest.user_agent.cache_size", + 1000, + 0, + Setting.Property.NodeScope + ); static final String DEFAULT_PARSER_NAME = "_default_"; @@ -62,7 +66,8 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) { throw new IllegalStateException( - "the user agent directory [" + userAgentConfigDirectory + "] containing the regex file doesn't exist"); + "the user agent directory [" + userAgentConfigDirectory + "] containing the regex file doesn't exist" + ); } long cacheSize = 
CACHE_SIZE_SETTING.get(parameters.env.settings()); @@ -78,15 +83,23 @@ public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { static Map createUserAgentParsers(Path userAgentConfigDirectory, UserAgentCache cache) throws IOException { Map userAgentParsers = new HashMap<>(); - UserAgentParser defaultParser = new UserAgentParser(DEFAULT_PARSER_NAME, - IngestUserAgentPlugin.class.getResourceAsStream("/regexes.yml"), cache); + UserAgentParser defaultParser = new UserAgentParser( + DEFAULT_PARSER_NAME, + IngestUserAgentPlugin.class.getResourceAsStream("/regexes.yml"), + cache + ); userAgentParsers.put(DEFAULT_PARSER_NAME, defaultParser); if (Files.exists(userAgentConfigDirectory) && Files.isDirectory(userAgentConfigDirectory)) { PathMatcher pathMatcher = userAgentConfigDirectory.getFileSystem().getPathMatcher("glob:**.yml"); - try (Stream regexFiles = Files.find(userAgentConfigDirectory, 1, - (path, attr) -> attr.isRegularFile() && pathMatcher.matches(path))) { + try ( + Stream regexFiles = Files.find( + userAgentConfigDirectory, + 1, + (path, attr) -> attr.isRegularFile() && pathMatcher.matches(path) + ) + ) { Iterable iterable = regexFiles::iterator; for (Path path : iterable) { String parserName = path.getFileName().toString(); diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentCache.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentCache.java index bca7346a29f..c3bc0770b23 100644 --- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentCache.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentCache.java @@ -64,8 +64,8 @@ class UserAgentCache { @Override public boolean equals(Object obj) { - if(obj != null && obj instanceof CompositeCacheKey) { - CompositeCacheKey s = (CompositeCacheKey)obj; + if (obj != null && obj instanceof CompositeCacheKey) { + CompositeCacheKey s = (CompositeCacheKey) obj; return parserName.equals(s.parserName) && userAgent.equals(s.userAgent); } return false; diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java index 96722d6eb24..7dc3ee43463 100644 --- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java @@ -68,8 +68,8 @@ final class UserAgentParser { private void init(InputStream regexStream) throws IOException { // EMPTY is safe here because we don't use namedObject - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, regexStream); + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, regexStream); XContentParser.Token token = yamlParser.nextToken(); @@ -81,26 +81,46 @@ final class UserAgentParser { List> parserConfigurations = readParserConfigurations(yamlParser); for (Map map : parserConfigurations) { - uaPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("family_replacement"), map.get("v1_replacement"), map.get("v2_replacement"), - map.get("v3_replacement"), map.get("v4_replacement"))); + uaPatterns.add( + new UserAgentSubpattern( + 
compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("family_replacement"), + map.get("v1_replacement"), + map.get("v2_replacement"), + map.get("v3_replacement"), + map.get("v4_replacement") + ) + ); } - } - else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { + } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { List> parserConfigurations = readParserConfigurations(yamlParser); for (Map map : parserConfigurations) { - osPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("os_replacement"), map.get("os_v1_replacement"), map.get("os_v2_replacement"), - map.get("os_v3_replacement"), map.get("os_v4_replacement"))); + osPatterns.add( + new UserAgentSubpattern( + compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("os_replacement"), + map.get("os_v1_replacement"), + map.get("os_v2_replacement"), + map.get("os_v3_replacement"), + map.get("os_v4_replacement") + ) + ); } - } - else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { + } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { List> parserConfigurations = readParserConfigurations(yamlParser); for (Map map : parserConfigurations) { - devicePatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("device_replacement"), null, null, null, null)); + devicePatterns.add( + new UserAgentSubpattern( + compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("device_replacement"), + null, + null, + null, + null + ) + ); } } } @@ -121,7 +141,7 @@ final class UserAgentParser { } private List> readParserConfigurations(XContentParser yamlParser) throws IOException { - List > patternList = new ArrayList<>(); + List> patternList = new ArrayList<>(); XContentParser.Token token = yamlParser.nextToken(); if (token != XContentParser.Token.START_ARRAY) { @@ -237,61 +257,67 @@ final class UserAgentParser { private final Pattern pattern; private final String nameReplacement, v1Replacement, v2Replacement, v3Replacement, v4Replacement; - UserAgentSubpattern(Pattern pattern, String nameReplacement, - String v1Replacement, String v2Replacement, String v3Replacement, String v4Replacement) { - this.pattern = pattern; - this.nameReplacement = nameReplacement; - this.v1Replacement = v1Replacement; - this.v2Replacement = v2Replacement; - this.v3Replacement = v3Replacement; - this.v4Replacement = v4Replacement; + UserAgentSubpattern( + Pattern pattern, + String nameReplacement, + String v1Replacement, + String v2Replacement, + String v3Replacement, + String v4Replacement + ) { + this.pattern = pattern; + this.nameReplacement = nameReplacement; + this.v1Replacement = v1Replacement; + this.v2Replacement = v2Replacement; + this.v3Replacement = v3Replacement; + this.v4Replacement = v4Replacement; } public VersionedName match(String agentString) { - String name = null, major = null, minor = null, patch = null, build = null; - Matcher matcher = pattern.matcher(agentString); + String name = null, major = null, minor = null, patch = null, build = null; + Matcher matcher = pattern.matcher(agentString); - if (!matcher.find()) { - return null; - } - - int groupCount = matcher.groupCount(); - - if (nameReplacement != null) { - if (nameReplacement.contains("$1") && groupCount >= 1 && matcher.group(1) != null) { - name = 
nameReplacement.replaceFirst("\\$1", Matcher.quoteReplacement(matcher.group(1))); - } else { - name = nameReplacement; + if (!matcher.find()) { + return null; } - } else if (groupCount >= 1) { - name = matcher.group(1); - } - if (v1Replacement != null) { - major = v1Replacement; - } else if (groupCount >= 2) { - major = matcher.group(2); - } + int groupCount = matcher.groupCount(); - if (v2Replacement != null) { - minor = v2Replacement; - } else if (groupCount >= 3) { - minor = matcher.group(3); - } + if (nameReplacement != null) { + if (nameReplacement.contains("$1") && groupCount >= 1 && matcher.group(1) != null) { + name = nameReplacement.replaceFirst("\\$1", Matcher.quoteReplacement(matcher.group(1))); + } else { + name = nameReplacement; + } + } else if (groupCount >= 1) { + name = matcher.group(1); + } - if (v3Replacement != null) { - patch = v3Replacement; - } else if (groupCount >= 4) { - patch = matcher.group(4); - } + if (v1Replacement != null) { + major = v1Replacement; + } else if (groupCount >= 2) { + major = matcher.group(2); + } - if (v4Replacement != null) { - build = v4Replacement; - } else if (groupCount >= 5) { - build = matcher.group(5); - } + if (v2Replacement != null) { + minor = v2Replacement; + } else if (groupCount >= 3) { + minor = matcher.group(3); + } - return name == null ? null : new VersionedName(name, major, minor, patch, build); + if (v3Replacement != null) { + patch = v3Replacement; + } else if (groupCount >= 4) { + patch = matcher.group(4); + } + + if (v4Replacement != null) { + build = v4Replacement; + } else if (groupCount >= 5) { + build = matcher.group(5); + } + + return name == null ? null : new VersionedName(name, major, minor, patch, build); } - } + } } diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentProcessor.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentProcessor.java index 6cb93027c1d..0625f1f8fd1 100644 --- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentProcessor.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentProcessor.java @@ -67,8 +67,16 @@ public class UserAgentProcessor extends AbstractProcessor { private final boolean ignoreMissing; private final boolean useECS; - public UserAgentProcessor(String tag, String description, String field, String targetField, UserAgentParser parser, - Set properties, boolean ignoreMissing, boolean useECS) { + public UserAgentProcessor( + String tag, + String description, + String field, + String targetField, + UserAgentParser parser, + Set properties, + boolean ignoreMissing, + boolean useECS + ) { super(tag, description); this.field = field; this.targetField = targetField; @@ -297,8 +305,12 @@ public class UserAgentProcessor extends AbstractProcessor { } @Override - public UserAgentProcessor create(Map factories, String processorTag, - String description, Map config) throws Exception { + public UserAgentProcessor create( + Map factories, + String processorTag, + String description, + Map config + ) throws Exception { String field = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "user_agent"); String regexFilename = readStringProperty(TYPE, processorTag, config, "regex_file", IngestUserAgentPlugin.DEFAULT_PARSER_NAME); @@ -308,8 +320,12 @@ public class UserAgentProcessor extends AbstractProcessor { UserAgentParser parser = 
userAgentParsers.get(regexFilename); if (parser == null) { - throw newConfigurationException(TYPE, processorTag, - "regex_file", "regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)"); + throw newConfigurationException( + TYPE, + processorTag, + "regex_file", + "regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)" + ); } final Set properties; @@ -327,9 +343,11 @@ public class UserAgentProcessor extends AbstractProcessor { } if (useECS == false) { - deprecationLogger.deprecate("ecs_false_non_common_schema", - "setting [ecs] to false for non-common schema " + - "format is deprecated and will be removed in 8.0, set to true or remove to use the non-deprecated format"); + deprecationLogger.deprecate( + "ecs_false_non_common_schema", + "setting [ecs] to false for non-common schema " + + "format is deprecated and will be removed in 8.0, set to true or remove to use the non-deprecated format" + ); } return new UserAgentProcessor(processorTag, description, field, targetField, parser, properties, ignoreMissing, useECS); @@ -340,16 +358,23 @@ public class UserAgentProcessor extends AbstractProcessor { NAME, // Deprecated in 6.7 (superceded by VERSION), to be removed in 8.0 - @Deprecated MAJOR, - @Deprecated MINOR, - @Deprecated PATCH, + @Deprecated + MAJOR, + @Deprecated + MINOR, + @Deprecated + PATCH, OS, // Deprecated in 6.7 (superceded by just using OS), to be removed in 8.0 - @Deprecated OS_NAME, - @Deprecated OS_MAJOR, - @Deprecated OS_MINOR, + @Deprecated + OS_NAME, + @Deprecated + OS_MAJOR, + @Deprecated + OS_MINOR, DEVICE, - @Deprecated BUILD, // Same deprecated as OS_* above + @Deprecated + BUILD, // Same deprecated as OS_* above ORIGINAL, VERSION; @@ -370,14 +395,16 @@ public class UserAgentProcessor extends AbstractProcessor { Property value = valueOf(propertyName.toUpperCase(Locale.ROOT)); if (DEPRECATED_PROPERTIES.contains(value)) { final String key = "user_agent_processor_property_" + propertyName.replaceAll("[^\\w_]+", "_"); - deprecationLogger.deprecate(key, - "the [{}] property is deprecated for the user-agent processor", propertyName); + deprecationLogger.deprecate(key, "the [{}] property is deprecated for the user-agent processor", propertyName); } return value; - } - catch (IllegalArgumentException e) { - throw new IllegalArgumentException("illegal property value [" + propertyName + "]. valid values are " + - Arrays.toString(EnumSet.allOf(Property.class).toArray())); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException( + "illegal property value [" + + propertyName + + "]. 
valid values are " + + Arrays.toString(EnumSet.allOf(Property.class).toArray()) + ); } } } diff --git a/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorFactoryTests.java b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorFactoryTests.java index cf47818305e..72815a37f46 100644 --- a/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorFactoryTests.java +++ b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorFactoryTests.java @@ -74,9 +74,12 @@ public class UserAgentProcessorFactoryTests extends OpenSearchTestCase { Files.createDirectories(userAgentConfigDir); // Copy file, leaving out the device parsers at the end - try (BufferedReader reader = new BufferedReader( - new InputStreamReader(UserAgentProcessor.class.getResourceAsStream("/regexes.yml"), StandardCharsets.UTF_8)); - BufferedWriter writer = Files.newBufferedWriter(userAgentConfigDir.resolve(regexWithoutDevicesFilename));) { + try ( + BufferedReader reader = new BufferedReader( + new InputStreamReader(UserAgentProcessor.class.getResourceAsStream("/regexes.yml"), StandardCharsets.UTF_8) + ); + BufferedWriter writer = Files.newBufferedWriter(userAgentConfigDir.resolve(regexWithoutDevicesFilename)); + ) { String line; while ((line = reader.readLine()) != null) { if (line.startsWith("device_parsers:")) { @@ -212,8 +215,13 @@ public class UserAgentProcessorFactoryTests extends OpenSearchTestCase { config.put("properties", Collections.singletonList("invalid")); OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> factory.create(null, null, null, config)); - assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [NAME, MAJOR, MINOR, " - + "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD, ORIGINAL, VERSION]")); + assertThat( + e.getMessage(), + equalTo( + "[properties] illegal property value [invalid]. 
valid values are [NAME, MAJOR, MINOR, " + + "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD, ORIGINAL, VERSION]" + ) + ); } public void testInvalidPropertiesType() throws Exception { diff --git a/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorTests.java b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorTests.java index 95fa9e70bee..51ff8aae213 100644 --- a/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorTests.java +++ b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/UserAgentProcessorTests.java @@ -60,23 +60,49 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { UserAgentParser parser = new UserAgentParser(randomAlphaOfLength(10), regexStream, new UserAgentCache(1000)); - processor = new UserAgentProcessor(randomAlphaOfLength(10), null, "source_field", "target_field", parser, - EnumSet.allOf(UserAgentProcessor.Property.class), false, true); + processor = new UserAgentProcessor( + randomAlphaOfLength(10), + null, + "source_field", + "target_field", + parser, + EnumSet.allOf(UserAgentProcessor.Property.class), + false, + true + ); } public void testNullValueWithIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), null, "source_field", "target_field", null, - EnumSet.allOf(UserAgentProcessor.Property.class), true, true); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("source_field", null)); + UserAgentProcessor processor = new UserAgentProcessor( + randomAlphaOfLength(10), + null, + "source_field", + "target_field", + null, + EnumSet.allOf(UserAgentProcessor.Property.class), + true, + true + ); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap("source_field", null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } public void testNonExistentWithIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), null, "source_field", "target_field", null, - EnumSet.allOf(UserAgentProcessor.Property.class), true, true); + UserAgentProcessor processor = new UserAgentProcessor( + randomAlphaOfLength(10), + null, + "source_field", + "target_field", + null, + EnumSet.allOf(UserAgentProcessor.Property.class), + true, + true + ); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); @@ -84,18 +110,36 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { } public void testNullWithoutIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), null, "source_field", "target_field", null, - EnumSet.allOf(UserAgentProcessor.Property.class), false, true); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), - Collections.singletonMap("source_field", null)); + UserAgentProcessor processor = new UserAgentProcessor( + randomAlphaOfLength(10), + null, + "source_field", + "target_field", + null, + EnumSet.allOf(UserAgentProcessor.Property.class), + false, + true + ); + 
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument( + random(), + Collections.singletonMap("source_field", null) + ); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot parse user-agent.")); } public void testNonExistentWithoutIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), null, "source_field", "target_field", null, - EnumSet.allOf(UserAgentProcessor.Property.class), false, true); + UserAgentProcessor processor = new UserAgentProcessor( + randomAlphaOfLength(10), + null, + "source_field", + "target_field", + null, + EnumSet.allOf(UserAgentProcessor.Property.class), + false, + true + ); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); @@ -105,8 +149,10 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { @SuppressWarnings("unchecked") public void testCommonBrowser() throws Exception { Map document = new HashMap<>(); - document.put("source_field", - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"); + document.put( + "source_field", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" + ); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -131,9 +177,11 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { @SuppressWarnings("unchecked") public void testUncommonDevice() throws Exception { Map document = new HashMap<>(); - document.put("source_field", - "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10+ " - + "(KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2"); + document.put( + "source_field", + "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10+ " + + "(KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2" + ); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -159,8 +207,7 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { @SuppressWarnings("unchecked") public void testSpider() throws Exception { Map document = new HashMap<>(); - document.put("source_field", - "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"); + document.put("source_field", "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -182,8 +229,7 @@ public class UserAgentProcessorTests extends OpenSearchTestCase { @SuppressWarnings("unchecked") public void testUnknown() throws Exception { Map document = new HashMap<>(); - document.put("source_field", - "Something I made up v42.0.1"); + document.put("source_field", "Something I made up v42.0.1"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); diff --git 
a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 808024ec95c..453787fe329 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -92,8 +92,8 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) - .addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC).unmappedType("long")) - .addScriptField("foo", new Script(ScriptType.INLINE, "expression", script, paramsMap)); + .addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC).unmappedType("long")) + .addScriptField("foo", new Script(ScriptType.INLINE, "expression", script, paramsMap)); return req; } @@ -128,12 +128,15 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testScore() throws Exception { createIndex("test"); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), - client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye")); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), + client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), + client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye") + ); ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction( - new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); + new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()) + ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent @@ -147,8 +150,7 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); - score = ScoreFunctionBuilders.scriptFunction( - new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); + score = ScoreFunctionBuilders.scriptFunction(new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); req.addAggregation(AggregationBuilders.max("max_score").script((score).getScript())); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent rsp = req.get(); @@ -158,11 +160,11 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testDateMethods() throws Exception { OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", "type=date")); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1") - .setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2") - .setSource("id", 2, "date0", 
"2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), + client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + ); SearchResponse rsp = buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()").get(); assertEquals(2, rsp.getHits().getTotalHits().value); SearchHits hits = rsp.getHits(); @@ -188,11 +190,11 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testDateObjectMethods() throws Exception { OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", "type=date")); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1") - .setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), - client().prepareIndex("test", "doc", "2") - .setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("id", 1, "date0", "2015-04-28T04:02:07Z", "date1", "1985-09-01T23:11:01Z"), + client().prepareIndex("test", "doc", "2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") + ); SearchResponse rsp = buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour").get(); assertEquals(2, rsp.getHits().getTotalHits().value); SearchHits hits = rsp.getHits(); @@ -216,16 +218,15 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { } public void testMultiValueMethods() throws Exception { - OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", - "double0", "type=double", - "double1", "type=double", - "double2", "type=double")); + OpenSearchAssertions.assertAcked( + prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double") + ); ensureGreen("test"); Map doc1 = new HashMap<>(); doc1.put("id", 1); - doc1.put("double0", new Double[]{5.0d, 1.0d, 1.5d}); - doc1.put("double1", new Double[]{1.2d, 2.4d}); + doc1.put("double0", new Double[] { 5.0d, 1.0d, 1.5d }); + doc1.put("double1", new Double[] { 1.2d, 2.4d }); doc1.put("double2", 3.0d); Map doc2 = new HashMap<>(); @@ -235,14 +236,15 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { Map doc3 = new HashMap<>(); doc3.put("id", 3); - doc3.put("double0", new Double[]{5.0d, 1.0d, 1.5d, -1.5d}); + doc3.put("double0", new Double[] { 5.0d, 1.0d, 1.5d, -1.5d }); doc3.put("double1", 4.0d); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource(doc1), - client().prepareIndex("test", "doc", "2").setSource(doc2), - client().prepareIndex("test", "doc", "3").setSource(doc3)); - + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource(doc1), + client().prepareIndex("test", "doc", "2").setSource(doc2), + client().prepareIndex("test", "doc", "3").setSource(doc3) + ); SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get(); assertSearchResponse(rsp); @@ -327,19 +329,27 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("doc['double'].getYear()").get(); fail(); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained IllegalArgumentException", - e.toString().contains("IllegalArgumentException"), equalTo(true)); - 
assertThat(e.toString() + "should have contained does not exist for numeric field", - e.toString().contains("does not exist for numeric field"), equalTo(true)); + assertThat( + e.toString() + "should have contained IllegalArgumentException", + e.toString().contains("IllegalArgumentException"), + equalTo(true) + ); + assertThat( + e.toString() + "should have contained does not exist for numeric field", + e.toString().contains("does not exist for numeric field"), + equalTo(true) + ); } } public void testSparseField() throws Exception { OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "x", "type=long", "y", "type=long")); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 4), - client().prepareIndex("test", "doc","2").setSource("id", 2, "y", 2)); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 4), + client().prepareIndex("test", "doc", "2").setSource("id", 2, "y", 2) + ); SearchResponse rsp = buildRequest("doc['x'] + 1").get(); OpenSearchAssertions.assertSearchResponse(rsp); SearchHits hits = rsp.getHits(); @@ -356,20 +366,24 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("doc['bogus']").get(); fail("Expected missing field to cause failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained missing field error", - e.toString().contains("does not exist in mappings"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained missing field error", + e.toString().contains("does not exist in mappings"), + equalTo(true) + ); } } public void testParams() throws Exception { createIndex("test"); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 10), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "x", 3), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "x", 5)); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("id", 1, "x", 10), + client().prepareIndex("test", "doc", "2").setSource("id", 2, "x", 3), + client().prepareIndex("test", "doc", "3").setSource("id", 3, "x", 5) + ); // a = int, b = double, c = long String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 
1 : 0)"; SearchResponse rsp = buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L).get(); @@ -386,10 +400,8 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("garbage%@#%@").get(); fail("Expected expression compilation failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained compilation failure", - e.toString().contains("compile error"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat(e.toString() + "should have contained compilation failure", e.toString().contains("compile error"), equalTo(true)); } } @@ -399,10 +411,12 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("a", "a", "astring").get(); fail("Expected string parameter to cause failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained non-numeric parameter error", - e.toString().contains("must be a numeric type"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained non-numeric parameter error", + e.toString().contains("must be a numeric type"), + equalTo(true) + ); } } @@ -412,10 +426,12 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("doc['text.keyword']").get(); fail("Expected text field to cause execution failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained non-numeric field error", - e.toString().contains("must be numeric"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained non-numeric field error", + e.toString().contains("must be numeric"), + equalTo(true) + ); } } @@ -425,10 +441,12 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("bogus").get(); fail("Expected bogus variable to cause execution failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained unknown variable error", - e.toString().contains("Unknown variable"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained unknown variable error", + e.toString().contains("Unknown variable"), + equalTo(true) + ); } } @@ -438,10 +456,12 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("doc").get(); fail("Expected doc variable without field to cause execution failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have 
contained a missing specific field error", - e.toString().contains("must be used with a specific field"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained a missing specific field error", + e.toString().contains("must be used with a specific field"), + equalTo(true) + ); } } @@ -451,10 +471,12 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { buildRequest("doc['foo'].bogus").get(); fail("Expected bogus field member to cause execution failure"); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString() + "should have contained ScriptException", - e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained member variable [bogus] does not exist", - e.toString().contains("Member variable [bogus] does not exist"), equalTo(true)); + assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); + assertThat( + e.toString() + "should have contained member variable [bogus] does not exist", + e.toString().contains("Member variable [bogus] does not exist"), + equalTo(true) + ); } } @@ -462,26 +484,30 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { // i.e. _value for aggregations createIndex("test"); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2), - client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4), - client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8)); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2), + client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4), + client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8) + ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) - .addAggregation( - AggregationBuilders.stats("int_agg").field("x") - .script(new Script(ScriptType.INLINE, - ExpressionScriptEngine.NAME, "_value * 3", Collections.emptyMap()))) - .addAggregation( - AggregationBuilders.stats("double_agg").field("y") - .script(new Script(ScriptType.INLINE, - ExpressionScriptEngine.NAME, "_value - 1.1", Collections.emptyMap()))) - .addAggregation( - AggregationBuilders.stats("const_agg").field("x") // specifically to test a script w/o _value - .script(new Script(ScriptType.INLINE, - ExpressionScriptEngine.NAME, "3.0", Collections.emptyMap())) - ); + .addAggregation( + AggregationBuilders.stats("int_agg") + .field("x") + .script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value * 3", Collections.emptyMap())) + ) + .addAggregation( + AggregationBuilders.stats("double_agg") + .field("y") + .script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value - 1.1", Collections.emptyMap())) + ) + .addAggregation( + AggregationBuilders.stats("const_agg") + .field("x") // specifically to test a script w/o _value + .script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "3.0", Collections.emptyMap())) + ); SearchResponse rsp = req.get(); assertEquals(3, rsp.getHits().getTotalHits().value); @@ -502,20 +528,22 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testStringSpecialValueVariable() throws Exception { // i.e. 
expression script for term aggregations, which is not allowed - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("doc", "text", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", "text", "type=keyword").get()); ensureGreen("test"); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("text", "hello"), - client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"), - client().prepareIndex("test", "doc", "3").setSource("text", "hello")); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("text", "hello"), + client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"), + client().prepareIndex("test", "doc", "3").setSource("text", "hello") + ); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) - .addAggregation( - AggregationBuilders.terms("term_agg").field("text") - .script( - new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value", Collections.emptyMap()))); + .addAggregation( + AggregationBuilders.terms("term_agg") + .field("text") + .script(new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value", Collections.emptyMap())) + ); String message; try { @@ -527,10 +555,8 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { } catch (SearchPhaseExecutionException e) { message = e.toString(); } - assertThat(message + "should have contained ScriptException", - message.contains("ScriptException"), equalTo(true)); - assertThat(message + "should have contained text variable error", - message.contains("text variable"), equalTo(true)); + assertThat(message + "should have contained ScriptException", message.contains("ScriptException"), equalTo(true)); + assertThat(message + "should have contained text variable error", message.contains("text variable"), equalTo(true)); } // test to make sure expressions are not allowed to be used as update scripts @@ -557,26 +583,38 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testPipelineAggregationScript() throws Exception { createIndex("agg_index"); ensureGreen("agg_index"); - indexRandom(true, - client().prepareIndex("agg_index", "doc", "1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), - client().prepareIndex("agg_index", "doc", "5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0)); - SearchResponse response = client() - .prepareSearch("agg_index") - .addAggregation( - histogram("histogram") - .field("one") - .interval(2) - .subAggregation(sum("twoSum").field("two")) - .subAggregation(sum("threeSum").field("three")) - .subAggregation(sum("fourSum").field("four")) - .subAggregation(bucketScript("totalSum", - new Script(ScriptType.INLINE, - ExpressionScriptEngine.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "twoSum", "threeSum", "fourSum"))) - .execute().actionGet(); + indexRandom( + true, + client().prepareIndex("agg_index", "doc", "1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index", "doc", "2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0), + 
client().prepareIndex("agg_index", "doc", "3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index", "doc", "4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), + client().prepareIndex("agg_index", "doc", "5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) + ); + SearchResponse response = client().prepareSearch("agg_index") + .addAggregation( + histogram("histogram").field("one") + .interval(2) + .subAggregation(sum("twoSum").field("two")) + .subAggregation(sum("threeSum").field("three")) + .subAggregation(sum("fourSum").field("four")) + .subAggregation( + bucketScript( + "totalSum", + new Script( + ScriptType.INLINE, + ExpressionScriptEngine.NAME, + "_value0 + _value1 + _value2", + Collections.emptyMap() + ), + "twoSum", + "threeSum", + "fourSum" + ) + ) + ) + .execute() + .actionGet(); Histogram histogram = response.getAggregations().get("histogram"); assertThat(histogram, notNullValue()); @@ -602,15 +640,27 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { } public void testGeo() throws Exception { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location").field("type", "geo_point"); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("location") + .field("type", "geo_point"); xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() - .field("name", "test") - .startObject("location").field("lat", 61.5240).field("lon", 105.3188).endObject() - .endObject()).execute().actionGet(); + client().prepareIndex("test", "type1", "1") + .setSource( + jsonBuilder().startObject() + .field("name", "test") + .startObject("location") + .field("lat", 61.5240) + .field("lon", 105.3188) + .endObject() + .endObject() + ) + .execute() + .actionGet(); refresh(); // access .lat SearchResponse rsp = buildRequest("doc['location'].lat").get(); @@ -635,15 +685,21 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { } public void testBoolean() throws Exception { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("doc") - .startObject("properties").startObject("vip").field("type", "boolean"); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject("vip") + .field("type", "boolean"); xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("doc", xContentBuilder)); ensureGreen(); - indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("id", 1, "price", 1.0, "vip", true), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "price", 2.0, "vip", false), - client().prepareIndex("test", "doc", "3").setSource("id", 3, "price", 2.0, "vip", false)); + indexRandom( + true, + client().prepareIndex("test", "doc", "1").setSource("id", 1, "price", 1.0, "vip", true), + client().prepareIndex("test", "doc", "2").setSource("id", 2, "price", 2.0, "vip", false), + client().prepareIndex("test", "doc", "3").setSource("id", 3, "price", 2.0, "vip", false) + ); // access .value SearchResponse rsp = buildRequest("doc['vip'].value").get(); 
assertSearchResponse(rsp); @@ -671,9 +727,11 @@ public class MoreExpressionIT extends OpenSearchIntegTestCase { public void testFilterScript() throws Exception { createIndex("test"); ensureGreen("test"); - indexRandom(true, + indexRandom( + true, client().prepareIndex("test", "doc", "1").setSource("id", 1, "foo", 1.0), - client().prepareIndex("test", "doc", "2").setSource("id", 2, "foo", 0.0)); + client().prepareIndex("test", "doc", "2").setSource("id", 2, "foo", 0.0) + ); SearchRequestBuilder builder = buildRequest("doc['foo'].value"); Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java index 32cd3df98c7..2a8236d5e0e 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java @@ -63,33 +63,42 @@ public class StoredExpressionIT extends OpenSearchIntegTestCase { } public void testAllOpsDisabledIndexedScripts() throws IOException { - client().admin().cluster().preparePutStoredScript() - .setId("script1") - .setContent(new BytesArray("{\"script\": {\"lang\": \"expression\", \"source\": \"2\"} }"), XContentType.JSON) - .get(); + client().admin() + .cluster() + .preparePutStoredScript() + .setId("script1") + .setContent(new BytesArray("{\"script\": {\"lang\": \"expression\", \"source\": \"2\"} }"), XContentType.JSON) + .get(); client().prepareIndex("test", "scriptTest", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get(); try { client().prepareUpdate("test", "scriptTest", "1") - .setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())).get(); + .setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) + .get(); fail("update script should have been rejected"); - } catch(Exception e) { + } catch (Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); assertThat(e.getCause().getMessage(), containsString("Failed to compile stored script [script1] using lang [expression]")); } try { client().prepareSearch() - .setSource(new SearchSourceBuilder().scriptField("test1", - new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))) - .setIndices("test").setTypes("scriptTest").get(); + .setSource( + new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) + ) + .setIndices("test") + .setTypes("scriptTest") + .get(); fail("search script should have been rejected"); - } catch(Exception e) { + } catch (Exception e) { assertThat(e.toString(), containsString("cannot execute scripts using [field] context")); } try { client().prepareSearch("test") - .setSource( - new SearchSourceBuilder().aggregation(AggregationBuilders.terms("test").script( - new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())))).get(); + .setSource( + new SearchSourceBuilder().aggregation( + AggregationBuilders.terms("test").script(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) + ) + ) + .get(); } catch (Exception e) { assertThat(e.toString(), containsString("cannot execute scripts using 
[aggs] context")); } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateField.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateField.java index bf6e567c9aa..4f904ad0bd5 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateField.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateField.java @@ -46,28 +46,28 @@ final class DateField { private DateField() {} // supported variables - static final String VALUE_VARIABLE = "value"; - static final String EMPTY_VARIABLE = "empty"; - static final String LENGTH_VARIABLE = "length"; + static final String VALUE_VARIABLE = "value"; + static final String EMPTY_VARIABLE = "empty"; + static final String LENGTH_VARIABLE = "length"; // supported methods - static final String GETVALUE_METHOD = "getValue"; - static final String ISEMPTY_METHOD = "isEmpty"; - static final String SIZE_METHOD = "size"; - static final String MINIMUM_METHOD = "min"; - static final String MAXIMUM_METHOD = "max"; - static final String AVERAGE_METHOD = "avg"; - static final String MEDIAN_METHOD = "median"; - static final String SUM_METHOD = "sum"; - static final String COUNT_METHOD = "count"; + static final String GETVALUE_METHOD = "getValue"; + static final String ISEMPTY_METHOD = "isEmpty"; + static final String SIZE_METHOD = "size"; + static final String MINIMUM_METHOD = "min"; + static final String MAXIMUM_METHOD = "max"; + static final String AVERAGE_METHOD = "avg"; + static final String MEDIAN_METHOD = "median"; + static final String SUM_METHOD = "sum"; + static final String COUNT_METHOD = "count"; // date-specific - static final String GET_YEAR_METHOD = "getYear"; - static final String GET_MONTH_METHOD = "getMonth"; + static final String GET_YEAR_METHOD = "getYear"; + static final String GET_MONTH_METHOD = "getMonth"; static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth"; - static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; - static final String GET_MINUTES_METHOD = "getMinutes"; - static final String GET_SECONDS_METHOD = "getSeconds"; + static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; + static final String GET_MINUTES_METHOD = "getMinutes"; + static final String GET_SECONDS_METHOD = "getSeconds"; static DoubleValuesSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateMethodValueSource.java index 84c144e8a6f..6aa7e640c77 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateMethodValueSource.java @@ -68,7 +68,7 @@ class DateMethodValueSource extends FieldDataValueSource { return new DoubleValues() { @Override public double doubleValue() throws IOException { - calendar.setTimeInMillis((long)docValues.doubleValue()); + calendar.setTimeInMillis((long) docValues.doubleValue()); return calendar.get(calendarType); } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObject.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObject.java index 6b7030b9519..502e29a5f2b 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObject.java +++ 
b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObject.java @@ -45,44 +45,44 @@ final class DateObject { private DateObject() {} // supported variables - static final String CENTURY_OF_ERA_VARIABLE = "centuryOfEra"; - static final String DAY_OF_MONTH_VARIABLE = "dayOfMonth"; - static final String DAY_OF_WEEK_VARIABLE = "dayOfWeek"; - static final String DAY_OF_YEAR_VARIABLE = "dayOfYear"; - static final String ERA_VARIABLE = "era"; - static final String HOUR_OF_DAY_VARIABLE = "hourOfDay"; - static final String MILLIS_OF_DAY_VARIABLE = "millisOfDay"; - static final String MILLIS_OF_SECOND_VARIABLE = "millisOfSecond"; - static final String MINUTE_OF_DAY_VARIABLE = "minuteOfDay"; - static final String MINUTE_OF_HOUR_VARIABLE = "minuteOfHour"; - static final String MONTH_OF_YEAR_VARIABLE = "monthOfYear"; - static final String SECOND_OF_DAY_VARIABLE = "secondOfDay"; - static final String SECOND_OF_MINUTE_VARIABLE = "secondOfMinute"; - static final String WEEK_OF_WEEK_YEAR_VARIABLE = "weekOfWeekyear"; - static final String WEEK_YEAR_VARIABLE = "weekyear"; - static final String YEAR_VARIABLE = "year"; - static final String YEAR_OF_CENTURY_VARIABLE = "yearOfCentury"; - static final String YEAR_OF_ERA_VARIABLE = "yearOfEra"; + static final String CENTURY_OF_ERA_VARIABLE = "centuryOfEra"; + static final String DAY_OF_MONTH_VARIABLE = "dayOfMonth"; + static final String DAY_OF_WEEK_VARIABLE = "dayOfWeek"; + static final String DAY_OF_YEAR_VARIABLE = "dayOfYear"; + static final String ERA_VARIABLE = "era"; + static final String HOUR_OF_DAY_VARIABLE = "hourOfDay"; + static final String MILLIS_OF_DAY_VARIABLE = "millisOfDay"; + static final String MILLIS_OF_SECOND_VARIABLE = "millisOfSecond"; + static final String MINUTE_OF_DAY_VARIABLE = "minuteOfDay"; + static final String MINUTE_OF_HOUR_VARIABLE = "minuteOfHour"; + static final String MONTH_OF_YEAR_VARIABLE = "monthOfYear"; + static final String SECOND_OF_DAY_VARIABLE = "secondOfDay"; + static final String SECOND_OF_MINUTE_VARIABLE = "secondOfMinute"; + static final String WEEK_OF_WEEK_YEAR_VARIABLE = "weekOfWeekyear"; + static final String WEEK_YEAR_VARIABLE = "weekyear"; + static final String YEAR_VARIABLE = "year"; + static final String YEAR_OF_CENTURY_VARIABLE = "yearOfCentury"; + static final String YEAR_OF_ERA_VARIABLE = "yearOfEra"; // supported methods - static final String GETCENTURY_OF_ERA_METHOD = "getCenturyOfEra"; - static final String GETDAY_OF_MONTH_METHOD = "getDayOfMonth"; - static final String GETDAY_OF_WEEK_METHOD = "getDayOfWeek"; - static final String GETDAY_OF_YEAR_METHOD = "getDayOfYear"; - static final String GETERA_METHOD = "getEra"; - static final String GETHOUR_OF_DAY_METHOD = "getHourOfDay"; - static final String GETMILLIS_OF_DAY_METHOD = "getMillisOfDay"; - static final String GETMILLIS_OF_SECOND_METHOD = "getMillisOfSecond"; - static final String GETMINUTE_OF_DAY_METHOD = "getMinuteOfDay"; - static final String GETMINUTE_OF_HOUR_METHOD = "getMinuteOfHour"; - static final String GETMONTH_OF_YEAR_METHOD = "getMonthOfYear"; - static final String GETSECOND_OF_DAY_METHOD = "getSecondOfDay"; - static final String GETSECOND_OF_MINUTE_METHOD = "getSecondOfMinute"; - static final String GETWEEK_OF_WEEK_YEAR_METHOD = "getWeekOfWeekyear"; - static final String GETWEEK_YEAR_METHOD = "getWeekyear"; - static final String GETYEAR_METHOD = "getYear"; - static final String GETYEAR_OF_CENTURY_METHOD = "getYearOfCentury"; - static final String GETYEAR_OF_ERA_METHOD = "getYearOfEra"; + static final String 
GETCENTURY_OF_ERA_METHOD = "getCenturyOfEra"; + static final String GETDAY_OF_MONTH_METHOD = "getDayOfMonth"; + static final String GETDAY_OF_WEEK_METHOD = "getDayOfWeek"; + static final String GETDAY_OF_YEAR_METHOD = "getDayOfYear"; + static final String GETERA_METHOD = "getEra"; + static final String GETHOUR_OF_DAY_METHOD = "getHourOfDay"; + static final String GETMILLIS_OF_DAY_METHOD = "getMillisOfDay"; + static final String GETMILLIS_OF_SECOND_METHOD = "getMillisOfSecond"; + static final String GETMINUTE_OF_DAY_METHOD = "getMinuteOfDay"; + static final String GETMINUTE_OF_HOUR_METHOD = "getMinuteOfHour"; + static final String GETMONTH_OF_YEAR_METHOD = "getMonthOfYear"; + static final String GETSECOND_OF_DAY_METHOD = "getSecondOfDay"; + static final String GETSECOND_OF_MINUTE_METHOD = "getSecondOfMinute"; + static final String GETWEEK_OF_WEEK_YEAR_METHOD = "getWeekOfWeekyear"; + static final String GETWEEK_YEAR_METHOD = "getWeekyear"; + static final String GETYEAR_METHOD = "getYear"; + static final String GETYEAR_OF_CENTURY_METHOD = "getYearOfCentury"; + static final String GETYEAR_OF_ERA_METHOD = "getYearOfEra"; static DoubleValuesSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { @@ -123,8 +123,9 @@ final class DateObject { case YEAR_OF_ERA_VARIABLE: return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ReadableDateTime::getYearOfEra); default: - throw new IllegalArgumentException("Member variable [" + variable + - "] does not exist for date object on field [" + fieldName + "]."); + throw new IllegalArgumentException( + "Member variable [" + variable + "] does not exist for date object on field [" + fieldName + "]." + ); } } @@ -167,8 +168,9 @@ final class DateObject { case GETYEAR_OF_ERA_METHOD: return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ReadableDateTime::getYearOfEra); default: - throw new IllegalArgumentException("Member method [" + method + - "] does not exist for date object on field [" + fieldName + "]."); + throw new IllegalArgumentException( + "Member method [" + method + "] does not exist for date object on field [" + fieldName + "]." 
+ ); } } } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObjectValueSource.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObjectValueSource.java index a7365593966..1ab778008e8 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObjectValueSource.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/DateObjectValueSource.java @@ -52,8 +52,12 @@ class DateObjectValueSource extends FieldDataValueSource { final String methodName; final ToIntFunction function; - DateObjectValueSource(IndexFieldData indexFieldData, MultiValueMode multiValueMode, - String methodName, ToIntFunction function) { + DateObjectValueSource( + IndexFieldData indexFieldData, + MultiValueMode multiValueMode, + String methodName, + ToIntFunction function + ) { super(indexFieldData, multiValueMode); Objects.requireNonNull(methodName); @@ -70,7 +74,7 @@ class DateObjectValueSource extends FieldDataValueSource { return new DoubleValues() { @Override public double doubleValue() throws IOException { - joda.setMillis((long)docValues.doubleValue()); + joda.setMillis((long) docValues.doubleValue()); return function.applyAsInt(joda); } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java index 0b6d64a9ac6..ba131473be4 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionAggregationScript.java @@ -106,7 +106,7 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory { // _value isn't used in script if specialValue == null if (specialValue != null) { if (value instanceof Number) { - specialValue.setValue(((Number)value).doubleValue()); + specialValue.setValue(((Number) value).doubleValue()); } else { throw new GeneralScriptException("Cannot use expression with text variable using " + exprScript); } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java index adced0fcff7..a6fcd7a1978 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java @@ -80,94 +80,90 @@ public class ExpressionScriptEngine implements ScriptEngine { public static final String NAME = "expression"; - private static Map, Function> contexts; + private static Map, Function> contexts; static { - Map, Function> contexts = new HashMap, Function>(); - contexts.put(BucketAggregationScript.CONTEXT, - ExpressionScriptEngine::newBucketAggregationScriptFactory); + Map, Function> contexts = new HashMap, Function>(); + contexts.put(BucketAggregationScript.CONTEXT, ExpressionScriptEngine::newBucketAggregationScriptFactory); - contexts.put(BucketAggregationSelectorScript.CONTEXT, - (Expression expr) -> { - BucketAggregationScript.Factory factory = newBucketAggregationScriptFactory(expr); - BucketAggregationSelectorScript.Factory wrappedFactory = parameters -> new BucketAggregationSelectorScript(parameters) { - @Override - public boolean execute() { - return 
factory.newInstance(getParams()).execute().doubleValue() == 1.0; - } - }; - return wrappedFactory; }); - - contexts.put(FilterScript.CONTEXT, - (Expression expr) -> new FilterScript.Factory() { + contexts.put(BucketAggregationSelectorScript.CONTEXT, (Expression expr) -> { + BucketAggregationScript.Factory factory = newBucketAggregationScriptFactory(expr); + BucketAggregationSelectorScript.Factory wrappedFactory = parameters -> new BucketAggregationSelectorScript(parameters) { @Override - public boolean isResultDeterministic() { - return true; + public boolean execute() { + return factory.newInstance(getParams()).execute().doubleValue() == 1.0; } + }; + return wrappedFactory; + }); - @Override - public FilterScript.LeafFactory newFactory(Map params, SearchLookup lookup) { - return newFilterScript(expr, lookup, params); - } - }); + contexts.put(FilterScript.CONTEXT, (Expression expr) -> new FilterScript.Factory() { + @Override + public boolean isResultDeterministic() { + return true; + } - contexts.put(ScoreScript.CONTEXT, - (Expression expr) -> new ScoreScript.Factory() { - @Override - public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { - return newScoreScript(expr, lookup, params); - } + @Override + public FilterScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return newFilterScript(expr, lookup, params); + } + }); - @Override - public boolean isResultDeterministic() { - return true; - } - }); + contexts.put(ScoreScript.CONTEXT, (Expression expr) -> new ScoreScript.Factory() { + @Override + public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return newScoreScript(expr, lookup, params); + } - contexts.put(TermsSetQueryScript.CONTEXT, - (Expression expr) -> (TermsSetQueryScript.Factory) (p, lookup) -> newTermsSetQueryScript(expr, lookup, p)); + @Override + public boolean isResultDeterministic() { + return true; + } + }); - contexts.put(AggregationScript.CONTEXT, - (Expression expr) -> new AggregationScript.Factory() { - @Override - public AggregationScript.LeafFactory newFactory(Map params, SearchLookup lookup) { - return newAggregationScript(expr, lookup, params); - } + contexts.put( + TermsSetQueryScript.CONTEXT, + (Expression expr) -> (TermsSetQueryScript.Factory) (p, lookup) -> newTermsSetQueryScript(expr, lookup, p) + ); - @Override - public boolean isResultDeterministic() { - return true; - } - }); + contexts.put(AggregationScript.CONTEXT, (Expression expr) -> new AggregationScript.Factory() { + @Override + public AggregationScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return newAggregationScript(expr, lookup, params); + } - contexts.put(NumberSortScript.CONTEXT, - (Expression expr) -> new NumberSortScript.Factory() { - @Override - public NumberSortScript.LeafFactory newFactory(Map params, SearchLookup lookup) { - return newSortScript(expr, lookup, params); - } + @Override + public boolean isResultDeterministic() { + return true; + } + }); - @Override - public boolean isResultDeterministic() { - return true; - } - }); + contexts.put(NumberSortScript.CONTEXT, (Expression expr) -> new NumberSortScript.Factory() { + @Override + public NumberSortScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return newSortScript(expr, lookup, params); + } - contexts.put(FieldScript.CONTEXT, - (Expression expr) -> new FieldScript.Factory() { - @Override - public FieldScript.LeafFactory newFactory(Map params, SearchLookup lookup) { - return newFieldScript(expr, lookup, params); - } + @Override + 
public boolean isResultDeterministic() { + return true; + } + }); - @Override - public boolean isResultDeterministic() { - return true; - } - }); + contexts.put(FieldScript.CONTEXT, (Expression expr) -> new FieldScript.Factory() { + @Override + public FieldScript.LeafFactory newFactory(Map params, SearchLookup lookup) { + return newFieldScript(expr, lookup, params); + } - ExpressionScriptEngine.contexts = Collections.unmodifiableMap(contexts); - } + @Override + public boolean isResultDeterministic() { + return true; + } + }); + + ExpressionScriptEngine.contexts = Collections.unmodifiableMap(contexts); + } @Override public String getType() { @@ -175,12 +171,7 @@ public class ExpressionScriptEngine implements ScriptEngine { } @Override - public T compile( - String scriptName, - String scriptSource, - ScriptContext context, - Map params - ) { + public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { // classloader created here final SecurityManager sm = System.getSecurityManager(); SpecialPermission.check(); @@ -224,8 +215,7 @@ public class ExpressionScriptEngine implements ScriptEngine { private static BucketAggregationScript.Factory newBucketAggregationScriptFactory(Expression expr) { return parameters -> { - ReplaceableConstDoubleValues[] functionValuesArray = - new ReplaceableConstDoubleValues[expr.variables.length]; + ReplaceableConstDoubleValues[] functionValuesArray = new ReplaceableConstDoubleValues[expr.variables.length]; Map functionValuesMap = new HashMap<>(); for (int i = 0; i < expr.variables.length; ++i) { functionValuesArray[i] = new ReplaceableConstDoubleValues(); @@ -237,12 +227,24 @@ public class ExpressionScriptEngine implements ScriptEngine { getParams().forEach((name, value) -> { ReplaceableConstDoubleValues placeholder = functionValuesMap.get(name); if (placeholder == null) { - throw new IllegalArgumentException("Error using " + expr + ". " + - "The variable [" + name + "] does not exist in the executable expressions script."); + throw new IllegalArgumentException( + "Error using " + + expr + + ". " + + "The variable [" + + name + + "] does not exist in the executable expressions script." + ); } else if (value instanceof Number == false) { - throw new IllegalArgumentException("Error using " + expr + ". " + - "Executable expressions scripts can only process numbers." + - " The variable [" + name + "] is not a number."); + throw new IllegalArgumentException( + "Error using " + + expr + + ". " + + "Executable expressions scripts can only process numbers." + + " The variable [" + + name + + "] is not a number." 
+ ); } else { placeholder.setValue(((Number) value).doubleValue()); } @@ -280,8 +282,11 @@ public class ExpressionScriptEngine implements ScriptEngine { return new ExpressionNumberSortScript(expr, bindings, needsScores); } - private static TermsSetQueryScript.LeafFactory newTermsSetQueryScript(Expression expr, SearchLookup lookup, - @Nullable Map vars) { + private static TermsSetQueryScript.LeafFactory newTermsSetQueryScript( + Expression expr, + SearchLookup lookup, + @Nullable Map vars + ) { // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, // instead of complicating SimpleBindings (which should stay simple) SimpleBindings bindings = new SimpleBindings(); @@ -302,8 +307,11 @@ public class ExpressionScriptEngine implements ScriptEngine { return new ExpressionTermSetQueryScript(expr, bindings); } - private static AggregationScript.LeafFactory newAggregationScript(Expression expr, SearchLookup lookup, - @Nullable Map vars) { + private static AggregationScript.LeafFactory newAggregationScript( + Expression expr, + SearchLookup lookup, + @Nullable Map vars + ) { // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, // instead of complicating SimpleBindings (which should stay simple) SimpleBindings bindings = new SimpleBindings(); @@ -367,6 +375,7 @@ public class ExpressionScriptEngine implements ScriptEngine { public boolean execute() { return script.execute(null) != 0.0; } + @Override public void setDocument(int docid) { script.setDocument(docid); @@ -518,13 +527,13 @@ public class ExpressionScriptEngine implements ScriptEngine { // TODO: document and/or error if params contains _score? // NOTE: by checking for the variable in params first, it allows masking document fields with a global constant, // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field? - private static void bindFromParams(@Nullable final Map params, - final SimpleBindings bindings, final String variable) throws ParseException { + private static void bindFromParams(@Nullable final Map params, final SimpleBindings bindings, final String variable) + throws ParseException { // NOTE: by checking for the variable in vars first, it allows masking document fields with a global constant, // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field? 
Object value = params.get(variable); if (value instanceof Number) { - bindings.add(variable, DoubleValuesSource.constant(((Number)value).doubleValue())); + bindings.add(variable, DoubleValuesSource.constant(((Number) value).doubleValue())); } else { throw new ParseException("Parameter [" + variable + "] must be a numeric type", 0); } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/GeoField.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/GeoField.java index 1f4d52d0b8a..35be5e25bb0 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/GeoField.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/GeoField.java @@ -43,14 +43,14 @@ final class GeoField { private GeoField() {} // supported variables - static final String EMPTY_VARIABLE = "empty"; - static final String LAT_VARIABLE = "lat"; - static final String LON_VARIABLE = "lon"; + static final String EMPTY_VARIABLE = "empty"; + static final String LAT_VARIABLE = "lat"; + static final String LON_VARIABLE = "lon"; // supported methods - static final String ISEMPTY_METHOD = "isEmpty"; - static final String GETLAT_METHOD = "getLat"; - static final String GETLON_METHOD = "getLon"; + static final String ISEMPTY_METHOD = "isEmpty"; + static final String GETLAT_METHOD = "getLat"; + static final String GETLON_METHOD = "getLon"; static DoubleValuesSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/NumericField.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/NumericField.java index a103e3de521..8fdd7e8885a 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/NumericField.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/NumericField.java @@ -44,20 +44,20 @@ final class NumericField { private NumericField() {} // supported variables - static final String VALUE_VARIABLE = "value"; - static final String EMPTY_VARIABLE = "empty"; - static final String LENGTH_VARIABLE = "length"; + static final String VALUE_VARIABLE = "value"; + static final String EMPTY_VARIABLE = "empty"; + static final String LENGTH_VARIABLE = "length"; // supported methods - static final String GETVALUE_METHOD = "getValue"; - static final String ISEMPTY_METHOD = "isEmpty"; - static final String SIZE_METHOD = "size"; - static final String MINIMUM_METHOD = "min"; - static final String MAXIMUM_METHOD = "max"; - static final String AVERAGE_METHOD = "avg"; - static final String MEDIAN_METHOD = "median"; - static final String SUM_METHOD = "sum"; - static final String COUNT_METHOD = "count"; + static final String GETVALUE_METHOD = "getValue"; + static final String ISEMPTY_METHOD = "isEmpty"; + static final String SIZE_METHOD = "size"; + static final String MINIMUM_METHOD = "min"; + static final String MAXIMUM_METHOD = "max"; + static final String AVERAGE_METHOD = "avg"; + static final String MEDIAN_METHOD = "median"; + static final String SUM_METHOD = "sum"; + static final String COUNT_METHOD = "count"; static DoubleValuesSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { switch (variable) { @@ -68,8 +68,9 @@ final class NumericField { case LENGTH_VARIABLE: return new CountMethodValueSource(fieldData); default: - throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + - "numeric 
field [" + fieldName + "]."); + throw new IllegalArgumentException( + "Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]." + ); } } diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java index 9e3c211f57e..28e4707a071 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ReplaceableConstDoubleValueSource.java @@ -62,10 +62,8 @@ final class ReplaceableConstDoubleValueSource extends DoubleValuesSource { @Override public Explanation explain(LeafReaderContext ctx, int docId, Explanation scoreExplanation) throws IOException { - if (fv.advanceExact(docId)) - return Explanation.match((float)fv.doubleValue(), "ReplaceableConstDoubleValues"); - else - return Explanation.noMatch("ReplaceableConstDoubleValues"); + if (fv.advanceExact(docId)) return Explanation.match((float) fv.doubleValue(), "ReplaceableConstDoubleValues"); + else return Explanation.noMatch("ReplaceableConstDoubleValues"); } @Override diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java index faccc6daf0e..1d6c7730d7f 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java @@ -86,16 +86,12 @@ public class ExpressionFieldScriptTests extends OpenSearchTestCase { } public void testCompileError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['field'].value * *@#)(@$*@#$ + 4"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['field'].value * *@#)(@$*@#$ + 4"); }); assertTrue(e.getCause() instanceof ParseException); } public void testLinkError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['nonexistent'].value * 5"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['nonexistent'].value * 5"); }); assertTrue(e.getCause() instanceof ParseException); } diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java index fed93f8bd0f..66967188c08 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java @@ -80,22 +80,17 @@ public class ExpressionNumberSortScriptTests extends OpenSearchTestCase { } private NumberSortScript.LeafFactory compile(String expression) { - NumberSortScript.Factory factory = - service.compile(null, expression, NumberSortScript.CONTEXT, Collections.emptyMap()); + NumberSortScript.Factory factory = service.compile(null, expression, NumberSortScript.CONTEXT, Collections.emptyMap()); return factory.newFactory(Collections.emptyMap(), lookup); } public void testCompileError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - 
compile("doc['field'].value * *@#)(@$*@#$ + 4"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['field'].value * *@#)(@$*@#$ + 4"); }); assertTrue(e.getCause() instanceof ParseException); } public void testLinkError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['nonexistent'].value * 5"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['nonexistent'].value * 5"); }); assertTrue(e.getCause() instanceof ParseException); } diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java index 76ee8c933e5..7d142992cc3 100644 --- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java +++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java @@ -80,22 +80,17 @@ public class ExpressionTermsSetQueryTests extends OpenSearchTestCase { } private TermsSetQueryScript.LeafFactory compile(String expression) { - TermsSetQueryScript.Factory factory = - service.compile(null, expression, TermsSetQueryScript.CONTEXT, Collections.emptyMap()); + TermsSetQueryScript.Factory factory = service.compile(null, expression, TermsSetQueryScript.CONTEXT, Collections.emptyMap()); return factory.newFactory(Collections.emptyMap(), lookup); } public void testCompileError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['field'].value * *@#)(@$*@#$ + 4"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['field'].value * *@#)(@$*@#$ + 4"); }); assertTrue(e.getCause() instanceof ParseException); } public void testLinkError() { - ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['nonexistent'].value * 5"); - }); + ScriptException e = expectThrows(ScriptException.class, () -> { compile("doc['nonexistent'].value * 5"); }); assertTrue(e.getCause() instanceof ParseException); } diff --git a/modules/lang-expression/src/yamlRestTest/java/org/opensearch/script/expression/LangExpressionClientYamlTestSuiteIT.java b/modules/lang-expression/src/yamlRestTest/java/org/opensearch/script/expression/LangExpressionClientYamlTestSuiteIT.java index e208365f207..d05288b3957 100644 --- a/modules/lang-expression/src/yamlRestTest/java/org/opensearch/script/expression/LangExpressionClientYamlTestSuiteIT.java +++ b/modules/lang-expression/src/yamlRestTest/java/org/opensearch/script/expression/LangExpressionClientYamlTestSuiteIT.java @@ -49,4 +49,3 @@ public class LangExpressionClientYamlTestSuiteIT extends OpenSearchClientYamlSui return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index 18ec8777400..9ce270bf326 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -72,17 +72,19 @@ public class MultiSearchTemplateIT extends OpenSearchIntegTestCase { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; 
i++) { indexRequestBuilders[i] = client().prepareIndex("msearch", "test", String.valueOf(i)) - .setSource("odd", (i % 2 == 0), "group", (i % 3)); + .setSource("odd", (i % 2 == 0), "group", (i % 3)); } indexRandom(true, indexRequestBuilders); - final String template = Strings.toString(jsonBuilder().startObject() - .startObject("query") - .startObject("{{query_type}}") - .field("{{field_name}}", "{{field_value}}") - .endObject() - .endObject() - .endObject()); + final String template = Strings.toString( + jsonBuilder().startObject() + .startObject("query") + .startObject("{{query_type}}") + .field("{{field_name}}", "{{field_value}}") + .endObject() + .endObject() + .endObject() + ); MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); @@ -161,23 +163,23 @@ public class MultiSearchTemplateIT extends OpenSearchIntegTestCase { SearchTemplateResponse searchTemplateResponse1 = response1.getResponse(); assertThat(searchTemplateResponse1.hasResponse(), is(true)); assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2)); - assertThat(searchTemplateResponse1.getSource().utf8ToString(), - equalTo("{\"query\":{\"match\":{\"odd\":\"true\"}}}")); + assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo("{\"query\":{\"match\":{\"odd\":\"true\"}}}")); MultiSearchTemplateResponse.Item response2 = response.getResponses()[1]; assertThat(response2.isFailure(), is(false)); SearchTemplateResponse searchTemplateResponse2 = response2.getResponse(); assertThat(searchTemplateResponse2.hasResponse(), is(false)); - assertThat(searchTemplateResponse2.getSource().utf8ToString(), - equalTo("{\"query\":{\"match_phrase_prefix\":{\"message\":\"quick brown f\"}}}")); + assertThat( + searchTemplateResponse2.getSource().utf8ToString(), + equalTo("{\"query\":{\"match_phrase_prefix\":{\"message\":\"quick brown f\"}}}") + ); MultiSearchTemplateResponse.Item response3 = response.getResponses()[2]; assertThat(response3.isFailure(), is(false)); SearchTemplateResponse searchTemplateResponse3 = response3.getResponse(); assertThat(searchTemplateResponse3.hasResponse(), is(true)); assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2)); - assertThat(searchTemplateResponse3.getSource().utf8ToString(), - equalTo("{\"query\":{\"term\":{\"odd\":\"false\"}}}")); + assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo("{\"query\":{\"term\":{\"odd\":\"false\"}}}")); MultiSearchTemplateResponse.Item response4 = response.getResponses()[3]; assertThat(response4.isFailure(), is(true)); @@ -188,7 +190,6 @@ public class MultiSearchTemplateIT extends OpenSearchIntegTestCase { assertThat(response5.isFailure(), is(false)); SearchTemplateResponse searchTemplateResponse5 = response5.getResponse(); assertThat(searchTemplateResponse5.hasResponse(), is(false)); - assertThat(searchTemplateResponse5.getSource().utf8ToString(), - equalTo("{\"query\":{\"terms\":{\"group\":[1,2,3,]}}}")); + assertThat(searchTemplateResponse5.getSource().utf8ToString(), equalTo("{\"query\":{\"terms\":{\"group\":[1,2,3,]}}}")); } } diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java index 5c085abe35a..df53fcc0c3b 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java +++ 
b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/SearchTemplateIT.java @@ -68,12 +68,8 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { @Before public void setup() throws IOException { createIndex("test"); - client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("text", "value1").endObject()) - .get(); - client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject().field("text", "value2").endObject()) - .get(); + client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("text", "value1").endObject()).get(); + client().prepareIndex("test", "type", "2").setSource(jsonBuilder().startObject().field("text", "value2").endObject()).get(); client().admin().indices().prepareRefresh().get(); } @@ -84,19 +80,20 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("_all"); - expectThrows(Exception.class, () -> new SearchTemplateRequestBuilder(client()) - .setRequest(searchRequest) + expectThrows( + Exception.class, + () -> new SearchTemplateRequestBuilder(client()).setRequest(searchRequest) .setScript(query) .setScriptType(ScriptType.INLINE) .setScriptParams(randomBoolean() ? null : Collections.emptyMap()) - .get()); + .get() + ); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(searchRequest) - .setScript(query) - .setScriptType(ScriptType.INLINE) - .setScriptParams(Collections.singletonMap("my_size", 1)) - .get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(searchRequest) + .setScript(query) + .setScriptType(ScriptType.INLINE) + .setScriptParams(Collections.singletonMap("my_size", 1)) + .get(); assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); } @@ -107,12 +104,12 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { public void testTemplateQueryAsEscapedString() throws Exception { SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("_all"); - String query = - "{" + " \"source\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\"," - + " \"params\":{" - + " \"size\": 1" - + " }" - + "}"; + String query = "{" + + " \"source\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\"," + + " \"params\":{" + + " \"size\": 1" + + " }" + + "}"; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, query)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); @@ -126,14 +123,13 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { public void testTemplateQueryAsEscapedStringStartingWithConditionalClause() throws Exception { SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("_all"); - String templateString = - "{" - + " \"source\" : \"{ {{#use_size}} \\\"size\\\": \\\"{{size}}\\\", {{/use_size}} \\\"query\\\":{\\\"match_all\\\":{}}}\"," - + " \"params\":{" - + " \"size\": 1," - + " \"use_size\": true" - + " }" - + "}"; + String templateString = "{" + + " \"source\" : \"{ {{#use_size}} \\\"size\\\": \\\"{{size}}\\\", {{/use_size}} \\\"query\\\":{\\\"match_all\\\":{}}}\"," + + " \"params\":{" + + " \"size\": 1," + + " \"use_size\": true" + + " }" + + "}"; SearchTemplateRequest request = 
SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); @@ -147,14 +143,13 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws Exception { SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("_all"); - String templateString = - "{" - + " \"source\" : \"{ \\\"query\\\":{\\\"match_all\\\":{}} {{#use_size}}, \\\"size\\\": \\\"{{size}}\\\" {{/use_size}} }\"," - + " \"params\":{" - + " \"size\": 1," - + " \"use_size\": true" - + " }" - + "}"; + String templateString = "{" + + " \"source\" : \"{ \\\"query\\\":{\\\"match_all\\\":{}} {{#use_size}}, \\\"size\\\": \\\"{{size}}\\\" {{/use_size}} }\"," + + " \"params\":{" + + " \"size\": 1," + + " \"use_size\": true" + + " }" + + "}"; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get(); @@ -162,27 +157,31 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { } public void testIndexedTemplateClient() throws Exception { - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("testTemplate") .setContent( new BytesArray( - "{" + - " \"script\": {" + - " \"lang\": \"mustache\"," + - " \"source\": {" + - " \"query\": {" + - " \"match\": {" + - " \"theField\": \"{{fieldParam}}\"" + - " }" + - " }" + - " }" + - " }" + - "}" + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match\": {" + + " \"theField\": \"{{fieldParam}}\"" + + " }" + + " }" + + " }" + + " }" + + "}" ), - XContentType.JSON)); + XContentType.JSON + ) + ); - GetStoredScriptResponse getResponse = client().admin().cluster() - .prepareGetStoredScript("testTemplate").get(); + GetStoredScriptResponse getResponse = client().admin().cluster().prepareGetStoredScript("testTemplate").get(); assertNotNull(getResponse.getSource()); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); @@ -197,10 +196,9 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("test").types("type")) - .setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( + new SearchRequest("test").types("type") + ).setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); assertHitCount(searchResponse.getResponse(), 4); assertAcked(client().admin().cluster().prepareDeleteStoredScript("testTemplate")); @@ -211,29 +209,22 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { public void testIndexedTemplate() throws Exception { - String script = - "{" + - " \"script\": {" + - " \"lang\": \"mustache\"," + - " \"source\": {" + - " \"query\": {" + - " \"match\": {" + - " \"theField\": \"{{fieldParam}}\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + String script = 
"{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match\": {" + + " \"theField\": \"{{fieldParam}}\"" + + " }" + + " }" + + " }" + + " }" + + "}"; - assertAcked( - client().admin().cluster().preparePutStoredScript().setId("1a").setContent(new BytesArray(script), XContentType.JSON) - ); - assertAcked( - client().admin().cluster().preparePutStoredScript().setId("2").setContent(new BytesArray(script), XContentType.JSON) - ); - assertAcked( - client().admin().cluster().preparePutStoredScript().setId("3").setContent(new BytesArray(script), XContentType.JSON) - ); + assertAcked(client().admin().cluster().preparePutStoredScript().setId("1a").setContent(new BytesArray(script), XContentType.JSON)); + assertAcked(client().admin().cluster().preparePutStoredScript().setId("2").setContent(new BytesArray(script), XContentType.JSON)); + assertAcked(client().admin().cluster().preparePutStoredScript().setId("3").setContent(new BytesArray(script), XContentType.JSON)); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); @@ -247,26 +238,26 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { Map templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest().indices("test").types("type")) - .setScript("1a") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( + new SearchRequest().indices("test").types("type") + ).setScript("1a").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); assertHitCount(searchResponse.getResponse(), 4); - expectThrows(ResourceNotFoundException.class, () -> new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest().indices("test").types("type")) + expectThrows( + ResourceNotFoundException.class, + () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test").types("type")) .setScript("1000") .setScriptType(ScriptType.STORED) .setScriptParams(templateParams) - .get()); + .get() + ); templateParams.put("fieldParam", "bar"); - searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("test").types("type")) - .setScript("2").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get(); + searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test").types("type")) + .setScript("2") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get(); assertHitCount(searchResponse.getResponse(), 1); } @@ -276,29 +267,31 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { ensureGreen("testindex"); client().prepareIndex("testindex", "test", "1") - .setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("searchtext", "dev1").endObject()) + .get(); client().admin().indices().prepareRefresh().get(); int iterations = randomIntBetween(2, 11); - String query = - "{" + - " \"script\": {" + - " \"lang\": \"mustache\"," + - " \"source\": {" + - " \"query\": {" + - " \"match_phrase_prefix\": {" + - " \"searchtext\": {" + - " \"query\": \"{{P_Keyword1}}\"," + - " \"slop\": {{slop}}" + - 
" }" + - " }" + - " }" + - " }" + - " }" + - "}"; + String query = "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match_phrase_prefix\": {" + + " \"searchtext\": {" + + " \"query\": \"{{P_Keyword1}}\"," + + " \"slop\": {{slop}}" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; for (int i = 1; i < iterations; i++) { - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("git01") .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(-1))), XContentType.JSON) ); @@ -309,47 +302,50 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { Map templateParams = new HashMap<>(); templateParams.put("P_Keyword1", "dev"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("testindex").types("test")) - .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get()); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex").types("test")) + .setScript("git01") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams) + .get() + ); assertThat(e.getMessage(), containsString("No negative slop allowed")); - assertAcked(client().admin().cluster().preparePutStoredScript() + assertAcked( + client().admin() + .cluster() + .preparePutStoredScript() .setId("git01") .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON) ); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("testindex").types("test")) - .setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams) - .get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( + new SearchRequest("testindex").types("test") + ).setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get(); assertHitCount(searchResponse.getResponse(), 1); } } public void testIndexedTemplateWithArray() throws Exception { - String multiQuery = - "{\n" + - " \"script\": {\n" + - " \"lang\": \"mustache\",\n" + - " \"source\": {\n" + - " \"query\": {\n" + - " \"terms\": {\n" + - " \"theField\": [\n" + - " \"{{#fieldParam}}\",\n" + - " \"{{.}}\",\n" + - " \"{{/fieldParam}}\"\n" + - " ]\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + String multiQuery = "{\n" + + " \"script\": {\n" + + " \"lang\": \"mustache\",\n" + + " \"source\": {\n" + + " \"query\": {\n" + + " \"terms\": {\n" + + " \"theField\": [\n" + + " \"{{#fieldParam}}\",\n" + + " \"{{.}}\",\n" + + " \"{{/fieldParam}}\"\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; assertAcked( - client().admin().cluster().preparePutStoredScript() - .setId("4") - .setContent(new BytesArray(multiQuery), XContentType.JSON) + client().admin().cluster().preparePutStoredScript().setId("4").setContent(new BytesArray(multiQuery), XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); @@ -361,13 +357,12 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase { client().admin().indices().prepareRefresh().get(); Map 
arrayTemplateParams = new HashMap<>(); - String[] fieldParams = {"foo", "bar"}; + String[] fieldParams = { "foo", "bar" }; arrayTemplateParams.put("fieldParam", fieldParams); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) - .setRequest(new SearchRequest("test").types("type")) - .setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams) - .get(); + SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest( + new SearchRequest("test").types("type") + ).setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams).get(); assertHitCount(searchResponse.getResponse(), 5); } diff --git a/modules/lang-mustache/src/javaRestTest/java/org/opensearch/script/mustache/SearchTemplateWithoutContentIT.java b/modules/lang-mustache/src/javaRestTest/java/org/opensearch/script/mustache/SearchTemplateWithoutContentIT.java index fbed39da9ed..a28b7edb390 100644 --- a/modules/lang-mustache/src/javaRestTest/java/org/opensearch/script/mustache/SearchTemplateWithoutContentIT.java +++ b/modules/lang-mustache/src/javaRestTest/java/org/opensearch/script/mustache/SearchTemplateWithoutContentIT.java @@ -43,15 +43,19 @@ import static org.hamcrest.CoreMatchers.containsString; public class SearchTemplateWithoutContentIT extends OpenSearchRestTestCase { public void testSearchTemplateMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - new Request(randomBoolean() ? "POST" : "GET", "/_search/template"))); + ResponseException responseException = expectThrows( + ResponseException.class, + () -> client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_search/template")) + ); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), containsString("request body or source parameter is required")); } public void testMultiSearchTemplateMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - new Request(randomBoolean() ? "POST" : "GET", "/_msearch/template"))); + ResponseException responseException = expectThrows( + ResponseException.class, + () -> client().performRequest(new Request(randomBoolean() ? 
"POST" : "GET", "/_msearch/template")) + ); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), containsString("request body or source parameter is required")); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateRequest.java index 617b2b34097..b645eeeedf1 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateRequest.java @@ -148,9 +148,9 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; MultiSearchTemplateRequest that = (MultiSearchTemplateRequest) o; - return maxConcurrentSearchRequests == that.maxConcurrentSearchRequests && - Objects.equals(requests, that.requests) && - Objects.equals(indicesOptions, that.indicesOptions); + return maxConcurrentSearchRequests == that.maxConcurrentSearchRequests + && Objects.equals(requests, that.requests) + && Objects.equals(indicesOptions, that.indicesOptions); } @Override @@ -158,8 +158,7 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions); } - public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest, - XContent xContent) throws IOException { + public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest, XContent xContent) throws IOException { ByteArrayOutputStream output = new ByteArrayOutputStream(); for (SearchTemplateRequest templateRequest : multiSearchTemplateRequest.requests()) { final SearchRequest searchRequest = templateRequest.getRequest(); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java index 5a7848cd86f..1802d03e209 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java @@ -188,14 +188,14 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera } public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { - //The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response + // The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser); org.opensearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses(); Item[] templateResponses = new Item[responses.length]; int i = 0; for (org.opensearch.action.search.MultiSearchResponse.Item item : responses) { SearchTemplateResponse stResponse = null; - if(item.getResponse() != null){ + if (item.getResponse() != null) { stResponse = new SearchTemplateResponse(); stResponse.setResponse(item.getResponse()); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustachePlugin.java 
b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustachePlugin.java index 50873eb258d..498dc2ac57c 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustachePlugin.java @@ -57,23 +57,32 @@ import java.util.function.Supplier; public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin, SearchPlugin { @Override - public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>>contexts) { + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { return new MustacheScriptEngine(); } @Override public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { - return Arrays.asList(new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class), - new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class)); + return Arrays.asList( + new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class), + new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class) + ); } @Override - public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<DiscoveryNodes> nodesInCluster) { + public List<RestHandler> getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier<DiscoveryNodes> nodesInCluster + ) { return Arrays.asList( - new RestSearchTemplateAction(), - new RestMultiSearchTemplateAction(settings), - new RestRenderSearchTemplateAction()); + new RestSearchTemplateAction(), + new RestMultiSearchTemplateAction(settings), + new RestRenderSearchTemplateAction() + ); } } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java index 248e6d69f51..e94dac2a3bf 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java @@ -76,12 +76,7 @@ public final class MustacheScriptEngine implements ScriptEngine { * @return a compiled template object for later execution.
* */ @Override - public <T> T compile( - String templateName, - String templateSource, - ScriptContext<T> context, - Map<String, String> options - ) { + public <T> T compile(String templateName, String templateSource, ScriptContext<T> context, Map<String, String> options) { if (context.instanceClazz.equals(TemplateScript.class) == false) { throw new IllegalArgumentException("mustache engine does not know how to handle context [" + context.name + "]"); } } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java index 18ab768205c..c4c7ec9bf12 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java @@ -54,8 +54,8 @@ import static org.opensearch.rest.RestRequest.Method.POST; public class RestMultiSearchTemplateAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchTemplateAction.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in multi search template requests is deprecated."; + static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + + " Specifying types in multi search template requests is deprecated."; private static final Set<String> RESPONSE_PARAMS; @@ -66,7 +66,6 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } - private final boolean allowExplicitIndex; public RestMultiSearchTemplateAction(Settings settings) { @@ -75,14 +74,17 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { @Override public List<Route> routes() { - return unmodifiableList(asList( - new Route(GET, "/_msearch/template"), - new Route(POST, "/_msearch/template"), - new Route(GET, "/{index}/_msearch/template"), - new Route(POST, "/{index}/_msearch/template"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_msearch/template"), - new Route(POST, "/{index}/{type}/_msearch/template"))); + return unmodifiableList( + asList( + new Route(GET, "/_msearch/template"), + new Route(POST, "/_msearch/template"), + new Route(GET, "/{index}/_msearch/template"), + new Route(POST, "/{index}/_msearch/template"), + // Deprecated typed endpoints.
+ new Route(GET, "/{index}/{type}/_msearch/template"), + new Route(POST, "/{index}/{type}/_msearch/template") + ) + ); } @Override @@ -113,17 +115,21 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { multiRequest.maxConcurrentSearchRequests(restRequest.paramAsInt("max_concurrent_searches", 0)); } - RestMultiSearchAction.parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, - (searchRequest, bytes) -> { - SearchTemplateRequest searchTemplateRequest = SearchTemplateRequest.fromXContent(bytes); - if (searchTemplateRequest.getScript() != null) { - searchTemplateRequest.setRequest(searchRequest); - multiRequest.add(searchTemplateRequest); - } else { - throw new IllegalArgumentException("Malformed search template"); - } - RestSearchAction.checkRestTotalHits(restRequest, searchRequest); - }); + RestMultiSearchAction.parseMultiLineRequest( + restRequest, + multiRequest.indicesOptions(), + allowExplicitIndex, + (searchRequest, bytes) -> { + SearchTemplateRequest searchTemplateRequest = SearchTemplateRequest.fromXContent(bytes); + if (searchTemplateRequest.getScript() != null) { + searchTemplateRequest.setRequest(searchRequest); + multiRequest.add(searchTemplateRequest); + } else { + throw new IllegalArgumentException("Malformed search template"); + } + RestSearchAction.checkRestTotalHits(restRequest, searchRequest); + } + ); return multiRequest; } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java index df2da7be542..1cf61a24bbb 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java @@ -51,11 +51,14 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler { @Override public List routes() { - return unmodifiableList(asList( - new Route(GET, "/_render/template"), - new Route(POST, "/_render/template"), - new Route(GET, "/_render/template/{id}"), - new Route(POST, "/_render/template/{id}"))); + return unmodifiableList( + asList( + new Route(GET, "/_render/template"), + new Route(POST, "/_render/template"), + new Route(GET, "/_render/template/{id}"), + new Route(POST, "/_render/template/{id}") + ) + ); } @Override diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java index 487cb5b0c1d..b66d2756869 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestSearchTemplateAction.java @@ -63,14 +63,17 @@ public class RestSearchTemplateAction extends BaseRestHandler { @Override public List routes() { - return unmodifiableList(asList( - new Route(GET, "/_search/template"), - new Route(POST, "/_search/template"), - new Route(GET, "/{index}/_search/template"), - new Route(POST, "/{index}/_search/template"), - // Deprecated typed endpoints. 
- new Route(GET, "/{index}/{type}/_search/template"), - new Route(POST, "/{index}/{type}/_search/template"))); + return unmodifiableList( + asList( + new Route(GET, "/_search/template"), + new Route(POST, "/_search/template"), + new Route(GET, "/{index}/_search/template"), + new Route(POST, "/{index}/_search/template"), + // Deprecated typed endpoints. + new Route(GET, "/{index}/{type}/_search/template"), + new Route(POST, "/{index}/{type}/_search/template") + ) + ); } @Override @@ -83,7 +86,12 @@ public class RestSearchTemplateAction extends BaseRestHandler { // Creates the search request with all required params SearchRequest searchRequest = new SearchRequest(); RestSearchAction.parseSearchRequest( - searchRequest, request, null, client.getNamedWriteableRegistry(), size -> searchRequest.source().size(size)); + searchRequest, + request, + null, + client.getNamedWriteableRegistry(), + size -> searchRequest.source().size(size) + ); // Creates the search template request SearchTemplateRequest searchTemplateRequest; diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java index 460a5859b5c..6e8dc815eb5 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java @@ -99,13 +99,13 @@ public class SearchTemplateRequest extends ActionRequest implements CompositeInd if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SearchTemplateRequest request1 = (SearchTemplateRequest) o; - return simulate == request1.simulate && - explain == request1.explain && - profile == request1.profile && - Objects.equals(request, request1.request) && - scriptType == request1.scriptType && - Objects.equals(script, request1.script) && - Objects.equals(scriptParams, request1.scriptParams); + return simulate == request1.simulate + && explain == request1.explain + && profile == request1.profile + && Objects.equals(request, request1.request) + && scriptType == request1.scriptType + && Objects.equals(script, request1.script) + && Objects.equals(scriptParams, request1.scriptParams); } @Override @@ -196,9 +196,7 @@ public class SearchTemplateRequest extends ActionRequest implements CompositeInd private static final ObjectParser PARSER; static { PARSER = new ObjectParser<>("search_template"); - PARSER.declareField((parser, request, s) -> - request.setScriptParams(parser.map()) - , PARAMS_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField((parser, request, s) -> request.setScriptParams(parser.map()), PARAMS_FIELD, ObjectParser.ValueType.OBJECT); PARSER.declareString((request, s) -> { request.setScriptType(ScriptType.STORED); request.setScript(s); @@ -208,7 +206,7 @@ public class SearchTemplateRequest extends ActionRequest implements CompositeInd PARSER.declareField((parser, request, value) -> { request.setScriptType(ScriptType.INLINE); if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) + // convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) try (XContentBuilder builder = XContentFactory.jsonBuilder()) { request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); } catch (IOException e) { diff 
--git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequestBuilder.java index 8cf14307a1b..0d2aa49eaab 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequestBuilder.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequestBuilder.java @@ -39,8 +39,7 @@ import org.opensearch.script.ScriptType; import java.util.Map; -public class SearchTemplateRequestBuilder - extends ActionRequestBuilder { +public class SearchTemplateRequestBuilder extends ActionRequestBuilder { SearchTemplateRequestBuilder(OpenSearchClient client, SearchTemplateAction action) { super(client, action, new SearchTemplateRequest()); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java index b26fa152def..027fff3d79b 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java @@ -58,8 +58,7 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToXC /** Contains the search response, if any **/ private SearchResponse response; - SearchTemplateResponse() { - } + SearchTemplateResponse() {} SearchTemplateResponse(StreamInput in) throws IOException { super(in); @@ -104,17 +103,13 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToXC if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) { Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName()); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON) - .value(source); + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); searchTemplateResponse.setSource(BytesReference.bytes(builder)); } else { XContentType contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType) - .map(contentAsMap); - XContentParser searchResponseParser = contentType.xContent().createParser( - parser.getXContentRegistry(), - parser.getDeprecationHandler(), - BytesReference.bytes(builder).streamInput()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); + XContentParser searchResponseParser = contentType.xContent() + .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), BytesReference.bytes(builder).streamInput()); SearchResponse searchResponse = SearchResponse.fromXContent(searchResponseParser); searchTemplateResponse.setResponse(searchResponse); @@ -128,7 +123,7 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToXC response.toXContent(builder, params); } else { builder.startObject(); - //we can assume the template is always json as we convert it before compiling it + // we can assume the template is always json as we convert it before compiling it try (InputStream stream = source.streamInput()) { builder.rawField(TEMPLATE_OUTPUT_FIELD.getPreferredName(), stream, XContentType.JSON); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportMultiSearchTemplateAction.java 
b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportMultiSearchTemplateAction.java index b3b8e1fac5e..8ebc5ace8d5 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -57,8 +57,13 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction CustomMustacheFactory.createEncoder("test")); assertThat(e.getMessage(), equalTo("No encoder found for MIME type [test]")); - assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE_WITH_CHARSET), - instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class)); - assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE), - instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class)); - assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.PLAIN_TEXT_MIME_TYPE), - instanceOf(CustomMustacheFactory.DefaultEncoder.class)); - assertThat(CustomMustacheFactory.createEncoder(CustomMustacheFactory.X_WWW_FORM_URLENCODED_MIME_TYPE), - instanceOf(CustomMustacheFactory.UrlEncoder.class)); + assertThat( + CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE_WITH_CHARSET), + instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class) + ); + assertThat( + CustomMustacheFactory.createEncoder(CustomMustacheFactory.JSON_MIME_TYPE), + instanceOf(CustomMustacheFactory.JsonEscapeEncoder.class) + ); + assertThat( + CustomMustacheFactory.createEncoder(CustomMustacheFactory.PLAIN_TEXT_MIME_TYPE), + instanceOf(CustomMustacheFactory.DefaultEncoder.class) + ); + assertThat( + CustomMustacheFactory.createEncoder(CustomMustacheFactory.X_WWW_FORM_URLENCODED_MIME_TYPE), + instanceOf(CustomMustacheFactory.UrlEncoder.class) + ); } public void testJsonEscapeEncoder() { diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java index 0c2a8722e9f..aaf3126876a 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateRequestTests.java @@ -56,8 +56,8 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase { public void testParseRequest() throws Exception { byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/opensearch/script/mustache/simple-msearch-template.json"); - RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withContent(new BytesArray(data), XContentType.JSON).build(); + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data), XContentType.JSON) + .build(); MultiSearchTemplateRequest request = RestMultiSearchTemplateAction.parseRequest(restRequest, true); @@ -93,10 +93,10 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase { } public void testParseWithCarriageReturn() throws Exception { - final String content = "{\"index\":[\"test0\", \"test1\"], \"request_cache\": true}\r\n" + - "{\"source\": {\"query\" : {\"match_{{template}}\" :{}}}, \"params\": {\"template\": \"all\" } }\r\n"; - RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withContent(new BytesArray(content), XContentType.JSON).build(); + final String content 
= "{\"index\":[\"test0\", \"test1\"], \"request_cache\": true}\r\n" + + "{\"source\": {\"query\" : {\"match_{{template}}\" :{}}}, \"params\": {\"template\": \"all\" } }\r\n"; + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(content), XContentType.JSON) + .build(); MultiSearchTemplateRequest request = RestMultiSearchTemplateAction.parseRequest(restRequest, true); @@ -115,8 +115,7 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase { public void testMaxConcurrentSearchRequests() { MultiSearchTemplateRequest request = new MultiSearchTemplateRequest(); request.maxConcurrentSearchRequests(randomIntBetween(1, Integer.MAX_VALUE)); - expectThrows(IllegalArgumentException.class, () -> - request.maxConcurrentSearchRequests(randomIntBetween(Integer.MIN_VALUE, 0))); + expectThrows(IllegalArgumentException.class, () -> request.maxConcurrentSearchRequests(randomIntBetween(Integer.MIN_VALUE, 0))); } public void testMultiSearchTemplateToJson() throws Exception { @@ -124,7 +123,7 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase { MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); for (int i = 0; i < numSearchRequests; i++) { // Create a random request. - String[] indices = {"test"}; + String[] indices = { "test" }; SearchRequest searchRequest = new SearchRequest(indices); // scroll is not supported in the current msearch or msearchtemplate api, so unset it: searchRequest.scroll((Scroll) null); @@ -144,12 +143,12 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase { multiSearchTemplateRequest.add(searchTemplateRequest); } - //Serialize the request + // Serialize the request String serialized = toJsonString(multiSearchTemplateRequest); - //Deserialize the request - RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withContent(new BytesArray(serialized), XContentType.JSON).build(); + // Deserialize the request + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(serialized), XContentType.JSON) + .build(); MultiSearchTemplateRequest deser = RestMultiSearchTemplateAction.parseRequest(restRequest, true); // For object equality purposes need to set the search requests' source to non-null diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateResponseTests.java index 7dcc2fea503..4aee84beb4d 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MultiSearchTemplateResponseTests.java @@ -65,8 +65,16 @@ public class MultiSearchTemplateResponseTests extends AbstractXContentTestCase instanceSupplier = MultiSearchTemplateResponseTests::createTestInstanceWithFailures; - //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, - //but that does not bother our assertions, as we only want to test that we don't break. + // with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + // but that does not bother our assertions, as we only want to test that we don't break. 
boolean supportsUnknownFields = true; - //exceptions are not of the same type whenever parsed back + // exceptions are not of the same type whenever parsed back boolean assertToXContentEquivalence = false; - AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, - getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, - this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + AbstractXContentTestCase.testFromXContent( + NUMBER_OF_TEST_RUNS, + instanceSupplier, + supportsUnknownFields, + Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), + this::createParser, + this::doParseInstance, + this::assertEqualInstances, + assertToXContentEquivalence, + ToXContent.EMPTY_PARAMS + ); } } diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java index 662c9d33cbb..326990d804b 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheScriptEngineTests.java @@ -64,34 +64,40 @@ public class MustacheScriptEngineTests extends OpenSearchTestCase { public void testSimpleParameterReplace() { Map compileParams = Collections.singletonMap("content_type", "application/json"); { - String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}}," - + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; + String template = "GET _search {\"query\": " + + "{\"boosting\": {" + + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute(); - assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + assertEquals( + "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", - o); + o + ); } { - String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}}," - + "\"negative\": {\"term\": {\"body\": {\"value\": \"{{body_val}}\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; + String template = "GET _search {\"query\": " + + "{\"boosting\": {" + + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + + "\"negative\": {\"term\": {\"body\": {\"value\": \"{{body_val}}\"}" + + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); vars.put("body_val", "\"quick brown\""); String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute(); - assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + assertEquals( + "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", - o); + o + ); } } public void testSimple() throws 
IOException { - String templateString = - "{" - + "\"source\":{\"match_{{template}}\": {}}," - + "\"params\":{\"template\":\"all\"}" - + "}"; + String templateString = "{" + "\"source\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, templateString); Script script = Script.parse(parser); TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Collections.emptyMap()); @@ -100,13 +106,13 @@ public class MustacheScriptEngineTests extends OpenSearchTestCase { } public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException { - String templateString = - "{" - + " \"source\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + " \"params\":{" - + " \"template\":\"all\"," - + " \"use_it\": true" - + " }" - + "}"; + String templateString = "{" + + " \"source\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + + " \"params\":{" + + " \"template\":\"all\"," + + " \"use_it\": true" + + " }" + + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, templateString); Script script = Script.parse(parser); TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Collections.emptyMap()); @@ -126,42 +132,42 @@ public class MustacheScriptEngineTests extends OpenSearchTestCase { assertThat(writer.toString(), equalTo("\\n")); } - Character[] specialChars = new Character[]{ - '\"', - '\\', - '\u0000', - '\u0001', - '\u0002', - '\u0003', - '\u0004', - '\u0005', - '\u0006', - '\u0007', - '\u0008', - '\u0009', - '\u000B', - '\u000C', - '\u000E', - '\u000F', - '\u001F'}; - String[] escapedChars = new String[]{ - "\\\"", - "\\\\", - "\\u0000", - "\\u0001", - "\\u0002", - "\\u0003", - "\\u0004", - "\\u0005", - "\\u0006", - "\\u0007", - "\\u0008", - "\\u0009", - "\\u000B", - "\\u000C", - "\\u000E", - "\\u000F", - "\\u001F"}; + Character[] specialChars = new Character[] { + '\"', + '\\', + '\u0000', + '\u0001', + '\u0002', + '\u0003', + '\u0004', + '\u0005', + '\u0006', + '\u0007', + '\u0008', + '\u0009', + '\u000B', + '\u000C', + '\u000E', + '\u000F', + '\u001F' }; + String[] escapedChars = new String[] { + "\\\"", + "\\\\", + "\\u0000", + "\\u0001", + "\\u0002", + "\\u0003", + "\\u0004", + "\\u0005", + "\\u0006", + "\\u0007", + "\\u0008", + "\\u0009", + "\\u000B", + "\\u000C", + "\\u000E", + "\\u000F", + "\\u001F" }; int iters = scaledRandomIntBetween(100, 1000); for (int i = 0; i < iters; i++) { int rounds = scaledRandomIntBetween(1, 20); @@ -202,13 +208,12 @@ public class MustacheScriptEngineTests extends OpenSearchTestCase { * */ private static boolean isEscapeChar(char c) { switch (c) { - case '"': - case '\\': - return true; + case '"': + case '\\': + return true; } - if (c < '\u002F') - return true; + if (c < '\u002F') return true; return false; } } diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheTests.java index 6788288c574..a74fc484235 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/MustacheTests.java @@ -65,7 +65,8 @@ public class MustacheTests extends OpenSearchTestCase { private ScriptEngine engine = new MustacheScriptEngine(); public void testBasics() { - String template = "GET _search {\"query\": " + "{\"boosting\": {" + 
String template = "GET _search {\"query\": " + + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; @@ -74,20 +75,18 @@ public class MustacheTests extends OpenSearchTestCase { TemplateScript.Factory factory = engine.compile(null, template, TemplateScript.CONTEXT, Collections.emptyMap()); TemplateScript result = factory.newInstance(params); assertEquals( - "Mustache templating broken", - "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," - + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", - result.execute() + "Mustache templating broken", + "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", + result.execute() ); } public void testArrayAccess() throws Exception { String template = "{{data.0}} {{data.1}}"; - TemplateScript.Factory factory = engine.compile(null, template, TemplateScript.CONTEXT, Collections.emptyMap()); + TemplateScript.Factory factory = engine.compile(null, template, TemplateScript.CONTEXT, Collections.emptyMap()); Map vars = new HashMap<>(); - Object data = randomFrom( - new String[] { "foo", "bar" }, - Arrays.asList("foo", "bar")); + Object data = randomFrom(new String[] { "foo", "bar" }, Arrays.asList("foo", "bar")); vars.put("data", data); assertThat(factory.newInstance(vars).execute(), equalTo("foo bar")); @@ -105,7 +104,7 @@ public class MustacheTests extends OpenSearchTestCase { TemplateScript.Factory factory = engine.compile(null, template, TemplateScript.CONTEXT, Collections.emptyMap()); Map vars = new HashMap<>(); Object data = randomFrom( - new String[][] { new String[] { "foo", "bar" }}, + new String[][] { new String[] { "foo", "bar" } }, Collections.singletonList(new String[] { "foo", "bar" }), singleton(new String[] { "foo", "bar" }) ); @@ -119,7 +118,8 @@ public class MustacheTests extends OpenSearchTestCase { Map vars = new HashMap<>(); Object data = randomFrom( new Object[] { singletonMap("key", "foo"), singletonMap("key", "bar") }, - Arrays.asList(singletonMap("key", "foo"), singletonMap("key", "bar"))); + Arrays.asList(singletonMap("key", "foo"), singletonMap("key", "bar")) + ); vars.put("data", data); assertThat(factory.newInstance(vars).execute(), equalTo("foo bar")); @@ -132,7 +132,6 @@ public class MustacheTests extends OpenSearchTestCase { assertThat(output, both(containsString("foo")).and(containsString("bar"))); } - public void testSizeAccessForCollectionsAndArrays() throws Exception { String[] randomArrayValues = generateRandomStringArray(10, 20, false); List randomList = Arrays.asList(generateRandomStringArray(10, 20, false)); @@ -198,23 +197,31 @@ public class MustacheTests extends OpenSearchTestCase { Map ctx = singletonMap("ctx", humans); - assertScript("{{#toJson}}.{{/toJson}}", ctx, - equalTo("{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + - "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}")); + assertScript( + "{{#toJson}}.{{/toJson}}", + ctx, + equalTo( + "{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + + "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}" + ) + ); - assertScript("{{#toJson}}ctx{{/toJson}}", ctx, - equalTo("{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + - 
"{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}")); + assertScript( + "{{#toJson}}ctx{{/toJson}}", + ctx, + equalTo( + "{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":" + + "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}" + ) + ); - assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, - equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}")); + assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}")); - assertScript("{{#toJson}}ctx.second{{/toJson}}", ctx, - equalTo("{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}")); + assertScript("{{#toJson}}ctx.second{{/toJson}}", ctx, equalTo("{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}")); } public void testSimpleArrayToJSON() throws Exception { - String[] array = new String[]{"one", "two", "three"}; + String[] array = new String[] { "one", "two", "three" }; Map ctx = singletonMap("array", array); assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"array\":[\"one\",\"two\",\"three\"]}")); @@ -253,81 +260,86 @@ public class MustacheTests extends OpenSearchTestCase { public void testEmbeddedToJSON() throws Exception { XContentBuilder builder = jsonBuilder().startObject() - .startArray("bulks") - .startObject() - .field("index", "index-1") - .field("type", "type-1") - .field("id", 1) - .endObject() - .startObject() - .field("index", "index-2") - .field("type", "type-2") - .field("id", 2) - .endObject() - .endArray() - .endObject(); + .startArray("bulks") + .startObject() + .field("index", "index-1") + .field("type", "type-1") + .field("id", 1) + .endObject() + .startObject() + .field("index", "index-2") + .field("type", "type-2") + .field("id", 2) + .endObject() + .endArray() + .endObject(); - Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); + Map ctx = singletonMap( + "ctx", + XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2() + ); - assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, - equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")); + assertScript( + "{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", + ctx, + equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}") + ); - assertScript("{{#ctx.bulks}}<{{#toJson}}id{{/toJson}}>{{/ctx.bulks}}", ctx, - equalTo("<1><2>")); + assertScript("{{#ctx.bulks}}<{{#toJson}}id{{/toJson}}>{{/ctx.bulks}}", ctx, equalTo("<1><2>")); } public void testSimpleArrayJoin() throws Exception { String template = "{{#join}}array{{/join}}"; - assertScript(template, singletonMap("array", new String[]{"one", "two", "three"}), equalTo("one,two,three")); - assertScript(template, singletonMap("array", new int[]{1, 2, 3}), equalTo("1,2,3")); - assertScript(template, singletonMap("array", new long[]{1L, 2L, 3L}), equalTo("1,2,3")); - assertScript(template, singletonMap("array", new double[]{1.5, 2.5, 3.5}), equalTo("1.5,2.5,3.5")); - assertScript(template, singletonMap("array", new boolean[]{true, false, true}), equalTo("true,false,true")); - assertScript(template, singletonMap("array", new boolean[]{true, false, true}), equalTo("true,false,true")); + assertScript(template, singletonMap("array", new String[] { "one", "two", "three" }), equalTo("one,two,three")); + assertScript(template, singletonMap("array", new int[] { 1, 2, 3 }), 
equalTo("1,2,3")); + assertScript(template, singletonMap("array", new long[] { 1L, 2L, 3L }), equalTo("1,2,3")); + assertScript(template, singletonMap("array", new double[] { 1.5, 2.5, 3.5 }), equalTo("1.5,2.5,3.5")); + assertScript(template, singletonMap("array", new boolean[] { true, false, true }), equalTo("true,false,true")); + assertScript(template, singletonMap("array", new boolean[] { true, false, true }), equalTo("true,false,true")); } public void testEmbeddedArrayJoin() throws Exception { XContentBuilder builder = jsonBuilder().startObject() - .startArray("people") - .startObject() - .field("name", "John Smith") - .startArray("emails") - .value("john@smith.com") - .value("john.smith@email.com") - .value("jsmith@email.com") - .endArray() - .endObject() - .startObject() - .field("name", "John Doe") - .startArray("emails") - .value("john@doe.com") - .value("john.doe@email.com") - .value("jdoe@email.com") - .endArray() - .endObject() - .endArray() - .endObject(); + .startArray("people") + .startObject() + .field("name", "John Smith") + .startArray("emails") + .value("john@smith.com") + .value("john.smith@email.com") + .value("jsmith@email.com") + .endArray() + .endObject() + .startObject() + .field("name", "John Doe") + .startArray("emails") + .value("john@doe.com") + .value("john.doe@email.com") + .value("jdoe@email.com") + .endArray() + .endObject() + .endArray() + .endObject(); - Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); + Map ctx = singletonMap( + "ctx", + XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2() + ); - assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, - equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); + assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); - assertScript("{{#join}}ctx.people.1.emails{{/join}}", ctx, - equalTo("john@doe.com,john.doe@email.com,jdoe@email.com")); + assertScript("{{#join}}ctx.people.1.emails{{/join}}", ctx, equalTo("john@doe.com,john.doe@email.com,jdoe@email.com")); - assertScript("{{#ctx.people}}to: {{#join}}emails{{/join}};{{/ctx.people}}", ctx, - equalTo("to: john@smith.com,john.smith@email.com,jsmith@email.com;to: john@doe.com,john.doe@email.com,jdoe@email.com;")); + assertScript( + "{{#ctx.people}}to: {{#join}}emails{{/join}};{{/ctx.people}}", + ctx, + equalTo("to: john@smith.com,john.smith@email.com,jsmith@email.com;to: john@doe.com,john.doe@email.com,jdoe@email.com;") + ); } public void testJoinWithToJson() { - Map params = singletonMap("terms", - Arrays.asList(singletonMap("term", "foo"), singletonMap("term", "bar"))); + Map params = singletonMap("terms", Arrays.asList(singletonMap("term", "foo"), singletonMap("term", "bar"))); - assertScript("{{#join}}{{#toJson}}terms{{/toJson}}{{/join}}", params, - equalTo("[{\"term\":\"foo\"},{\"term\":\"bar\"}]")); + assertScript("{{#join}}{{#toJson}}terms{{/toJson}}{{/join}}", params, equalTo("[{\"term\":\"foo\"},{\"term\":\"bar\"}]")); } public void testsUnsupportedTagsJoin() { @@ -355,12 +367,12 @@ public class MustacheTests extends OpenSearchTestCase { public void testUrlEncoder() { Map urls = new HashMap<>(); - urls.put("https://www.elastic.co", - "https%3A%2F%2Fwww.elastic.co"); - urls.put("", - "%3Clogstash-%7Bnow%2Fd%7D%3E"); - urls.put("?query=(foo:A OR baz:B) AND title:/joh?n(ath[oa]n)/ AND date:{* TO 2012-01}", - 
"%3Fquery%3D%28foo%3AA+OR+baz%3AB%29+AND+title%3A%2Fjoh%3Fn%28ath%5Boa%5Dn%29%2F+AND+date%3A%7B*+TO+2012-01%7D"); + urls.put("https://www.elastic.co", "https%3A%2F%2Fwww.elastic.co"); + urls.put("", "%3Clogstash-%7Bnow%2Fd%7D%3E"); + urls.put( + "?query=(foo:A OR baz:B) AND title:/joh?n(ath[oa]n)/ AND date:{* TO 2012-01}", + "%3Fquery%3D%28foo%3AA+OR+baz%3AB%29+AND+title%3A%2Fjoh%3Fn%28ath%5Boa%5Dn%29%2F+AND+date%3A%7B*+TO+2012-01%7D" + ); for (Map.Entry url : urls.entrySet()) { assertScript("{{#url}}{{params}}{{/url}}", singletonMap("params", url.getKey()), equalTo(url.getValue())); @@ -368,27 +380,44 @@ public class MustacheTests extends OpenSearchTestCase { } public void testUrlEncoderWithParam() throws Exception { - assertScript("{{#url}}{{index}}{{/url}}", singletonMap("index", ""), - equalTo("%3Clogstash-%7Bnow%2Fd%7BYYYY.MM.dd%7C%2B12%3A00%7D%7D%3E")); + assertScript( + "{{#url}}{{index}}{{/url}}", + singletonMap("index", ""), + equalTo("%3Clogstash-%7Bnow%2Fd%7BYYYY.MM.dd%7C%2B12%3A00%7D%7D%3E") + ); final String random = randomAlphaOfLength(10); - assertScript("{{#url}}prefix_{{s}}{{/url}}", singletonMap("s", random), - equalTo("prefix_" + URLEncoder.encode(random, StandardCharsets.UTF_8.name()))); + assertScript( + "{{#url}}prefix_{{s}}{{/url}}", + singletonMap("s", random), + equalTo("prefix_" + URLEncoder.encode(random, StandardCharsets.UTF_8.name())) + ); } public void testUrlEncoderWithJoin() { Map params = singletonMap("emails", Arrays.asList("john@smith.com", "john.smith@email.com", "jsmith@email.com")); - assertScript("?query={{#url}}{{#join}}emails{{/join}}{{/url}}", params, - equalTo("?query=john%40smith.com%2Cjohn.smith%40email.com%2Cjsmith%40email.com")); + assertScript( + "?query={{#url}}{{#join}}emails{{/join}}{{/url}}", + params, + equalTo("?query=john%40smith.com%2Cjohn.smith%40email.com%2Cjsmith%40email.com") + ); - params = singletonMap("indices", new String[]{"", "", ""}); - assertScript("{{#url}}https://localhost:9200/{{#join}}indices{{/join}}/_stats{{/url}}", params, - equalTo("https%3A%2F%2Flocalhost%3A9200%2F%3Clogstash-%7Bnow%2Fd-2d%7D" + - "%3E%2C%3Clogstash-%7Bnow%2Fd-1d%7D%3E%2C%3Clogstash-%7Bnow%2Fd%7D%3E%2F_stats")); + params = singletonMap("indices", new String[] { "", "", "" }); + assertScript( + "{{#url}}https://localhost:9200/{{#join}}indices{{/join}}/_stats{{/url}}", + params, + equalTo( + "https%3A%2F%2Flocalhost%3A9200%2F%3Clogstash-%7Bnow%2Fd-2d%7D" + + "%3E%2C%3Clogstash-%7Bnow%2Fd-1d%7D%3E%2C%3Clogstash-%7Bnow%2Fd%7D%3E%2F_stats" + ) + ); - params = singletonMap("fibonacci", new int[]{1, 1, 2, 3, 5, 8, 13, 21, 34, 55}); - assertScript("{{#url}}{{#join delimiter='+'}}fibonacci{{/join delimiter='+'}}{{/url}}", params, - equalTo("1%2B1%2B2%2B3%2B5%2B8%2B13%2B21%2B34%2B55")); + params = singletonMap("fibonacci", new int[] { 1, 1, 2, 3, 5, 8, 13, 21, 34, 55 }); + assertScript( + "{{#url}}{{#join delimiter='+'}}fibonacci{{/join delimiter='+'}}{{/url}}", + params, + equalTo("1%2B1%2B2%2B3%2B5%2B8%2B13%2B21%2B34%2B55") + ); } private void assertScript(String script, Map vars, Matcher matcher) { diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java index 28199874460..655d49a0273 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java +++ 
b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestMultiSearchTemplateActionTests.java @@ -49,12 +49,10 @@ public class RestMultiSearchTemplateActionTests extends RestActionTestCase { } public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n" + - "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; + String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) .withPath("/some_index/some_type/_msearch/template") .withContent(bytesContent, XContentType.JSON) .build(); @@ -66,12 +64,10 @@ public class RestMultiSearchTemplateActionTests extends RestActionTestCase { } public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + - "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; + String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withPath("/some_index/_msearch/template") + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_msearch/template") .withContent(bytesContent, XContentType.JSON) .build(); // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset. diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java index c62a22bb69d..4f95da755f8 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/RestSearchTemplateActionTests.java @@ -48,8 +48,7 @@ public class RestSearchTemplateActionTests extends RestActionTestCase { } public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) .withPath("/some_index/some_type/_search/template") .build(); @@ -61,8 +60,7 @@ public class RestSearchTemplateActionTests extends RestActionTestCase { Map params = new HashMap<>(); params.put("type", "some_type"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) .withPath("/some_index/_search/template") .withParams(params) .build(); diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java index af4507655e0..6c8e91d8c4d 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java @@ 
-61,10 +61,10 @@ public class SearchTemplateRequestTests extends AbstractWireSerializingTestCase< protected SearchTemplateRequest mutateInstance(SearchTemplateRequest instance) throws IOException { List> mutators = new ArrayList<>(); - mutators.add(request -> request.setScriptType( - randomValueOtherThan(request.getScriptType(), () -> randomFrom(ScriptType.values())))); - mutators.add(request -> request.setScript( - randomValueOtherThan(request.getScript(), () -> randomAlphaOfLength(50)))); + mutators.add( + request -> request.setScriptType(randomValueOtherThan(request.getScriptType(), () -> randomFrom(ScriptType.values()))) + ); + mutators.add(request -> request.setScript(randomValueOtherThan(request.getScript(), () -> randomAlphaOfLength(50)))); mutators.add(request -> { Map mutatedScriptParams = new HashMap<>(request.getScriptParams()); @@ -77,8 +77,14 @@ public class SearchTemplateRequestTests extends AbstractWireSerializingTestCase< mutators.add(request -> request.setExplain(!request.isExplain())); mutators.add(request -> request.setSimulate(!request.isSimulate())); - mutators.add(request -> request.setRequest(randomValueOtherThan(request.getRequest(), - () -> RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource)))); + mutators.add( + request -> request.setRequest( + randomValueOtherThan( + request.getRequest(), + () -> RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource) + ) + ) + ); SearchTemplateRequest mutatedInstance = copyInstance(instance); Consumer mutator = randomFrom(mutators); @@ -86,7 +92,6 @@ public class SearchTemplateRequestTests extends AbstractWireSerializingTestCase< return mutatedInstance; } - public static SearchTemplateRequest createRandomRequest() { SearchTemplateRequest request = new SearchTemplateRequest(); request.setScriptType(randomFrom(ScriptType.values())); @@ -102,8 +107,7 @@ public class SearchTemplateRequestTests extends AbstractWireSerializingTestCase< request.setProfile(randomBoolean()); request.setSimulate(randomBoolean()); - request.setRequest(RandomSearchRequestGenerator.randomSearchRequest( - SearchSourceBuilder::searchSource)); + request.setRequest(RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource)); return request; } } diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java index d5e6593f853..4783b8d3f89 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java @@ -75,11 +75,12 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase @Override protected void assertEqualInstances(SearchTemplateRequest expectedInstance, SearchTemplateRequest newInstance) { assertTrue( - expectedInstance.isExplain() == newInstance.isExplain() && - expectedInstance.isProfile() == newInstance.isProfile() && - expectedInstance.getScriptType() == newInstance.getScriptType() && - Objects.equals(expectedInstance.getScript(), newInstance.getScript()) && - Objects.equals(expectedInstance.getScriptParams(), newInstance.getScriptParams())); + expectedInstance.isExplain() == newInstance.isExplain() + && expectedInstance.isProfile() == newInstance.isProfile() + && expectedInstance.getScriptType() == 
newInstance.getScriptType() + && Objects.equals(expectedInstance.getScript(), newInstance.getScript()) + && Objects.equals(expectedInstance.getScriptParams(), newInstance.getScriptParams()) + ); } @Override @@ -102,21 +103,19 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase XContentType contentType = randomFrom(XContentType.values()); XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) .startObject() - .field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }") - .startObject("params") - .field("my_field", "foo") - .field("my_value", "bar") - .endObject() - .field("explain", false) - .field("profile", true) + .field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }") + .startObject("params") + .field("my_field", "foo") + .field("my_value", "bar") + .endObject() + .field("explain", false) + .field("profile", true) .endObject(); XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); - assertToXContentEquivalent(BytesReference.bytes(expectedRequest), - BytesReference.bytes(actualRequest), - contentType); + assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); } public void testToXContentWithStoredTemplate() throws IOException { @@ -134,38 +133,35 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase XContentType contentType = randomFrom(XContentType.values()); XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) .startObject() - .field("id", "match_template") - .startObject("params") - .field("my_field", "foo") - .field("my_value", "bar") - .endObject() - .field("explain", true) - .field("profile", false) + .field("id", "match_template") + .startObject("params") + .field("my_field", "foo") + .field("my_value", "bar") + .endObject() + .field("explain", true) + .field("profile", false) .endObject(); XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); - assertToXContentEquivalent( - BytesReference.bytes(expectedRequest), - BytesReference.bytes(actualRequest), - contentType); + assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); } public void testFromXContentWithEmbeddedTemplate() throws Exception { - String source = "{" + - " 'source' : {\n" + - " 'query': {\n" + - " 'terms': {\n" + - " 'status': [\n" + - " '{{#status}}',\n" + - " '{{.}}',\n" + - " '{{/status}}'\n" + - " ]\n" + - " }\n" + - " }\n" + - " }" + - "}"; + String source = "{" + + " 'source' : {\n" + + " 'query': {\n" + + " 'terms': {\n" + + " 'status': [\n" + + " '{{#status}}',\n" + + " '{{.}}',\n" + + " '{{/status}}'\n" + + " ]\n" + + " }\n" + + " }\n" + + " }" + + "}"; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source)); assertThat(request.getScript(), equalTo("{\"query\":{\"terms\":{\"status\":[\"{{#status}}\",\"{{.}}\",\"{{/status}}\"]}}}")); @@ -174,17 +170,17 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase } public void testFromXContentWithEmbeddedTemplateAndParams() throws Exception { - String source = "{" + - " 'source' : {" + - " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } }," + - " 'size' : '{{my_size}}'" + - " }," + - " 'params' : {" + - " 'my_field' : 'foo'," + - " 'my_value' : 
'bar'," + - " 'my_size' : 5" + - " }" + - "}"; + String source = "{" + + " 'source' : {" + + " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } }," + + " 'size' : '{{my_size}}'" + + " }," + + " 'params' : {" + + " 'my_field' : 'foo'," + + " 'my_value' : 'bar'," + + " 'my_size' : 5" + + " }" + + "}"; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source)); assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}")); diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java index 69feb16a902..84734e55e24 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java @@ -82,19 +82,27 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase BASE_WHITELISTS = - Collections.singletonList(WhitelistLoader.loadFromResourceFiles( - Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES)); + public static final List BASE_WHITELISTS = Collections.singletonList( + WhitelistLoader.loadFromResourceFiles(Whitelist.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES) + ); /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ public final ClassLoader classLoader; @@ -86,8 +85,13 @@ public final class Whitelist { public final List whitelistInstanceBindings; /** Standard constructor. All values must be not {@code null}. */ - public Whitelist(ClassLoader classLoader, List whitelistClasses, List whitelistImportedMethods, - List whitelistClassBindings, List whitelistInstanceBindings) { + public Whitelist( + ClassLoader classLoader, + List whitelistClasses, + List whitelistImportedMethods, + List whitelistClassBindings, + List whitelistInstanceBindings + ) { this.classLoader = Objects.requireNonNull(classLoader); this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java index 0a8437575db..3947be60054 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClass.java @@ -75,9 +75,14 @@ public final class WhitelistClass { public final Map, Object> painlessAnnotations; /** Standard constructor. All values must be not {@code null}. 
*/ - public WhitelistClass(String origin, String javaClassName, - List whitelistConstructors, List whitelistMethods, List whitelistFields, - List painlessAnnotations) { + public WhitelistClass( + String origin, + String javaClassName, + List whitelistConstructors, + List whitelistMethods, + List whitelistFields, + List painlessAnnotations + ) { this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); @@ -89,9 +94,12 @@ public final class WhitelistClass { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java index a4f127f7fad..387453f1ea8 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistClassBinding.java @@ -72,9 +72,14 @@ public class WhitelistClassBinding { public final Map, Object> painlessAnnotations; /** Standard constructor. All values must be not {@code null}. */ - public WhitelistClassBinding(String origin, String targetJavaClassName, - String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - List painlessAnnotations) { + public WhitelistClassBinding( + String origin, + String targetJavaClassName, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + List painlessAnnotations + ) { this.origin = Objects.requireNonNull(origin); this.targetJavaClassName = Objects.requireNonNull(targetJavaClassName); @@ -86,9 +91,12 @@ public class WhitelistClassBinding { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java index 71387609ac8..4b96d727e4e 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistConstructor.java @@ -67,9 +67,12 @@ public final class WhitelistConstructor { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = 
Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java index df91678cd6b..e6519ea68af 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistField.java @@ -67,9 +67,12 @@ public class WhitelistField { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java index 5a67bfd50b7..6e5994622ec 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistInstanceBinding.java @@ -68,9 +68,14 @@ public class WhitelistInstanceBinding { public final Map, Object> painlessAnnotations; /** Standard constructor. All values must be not {@code null}. 
*/ - public WhitelistInstanceBinding(String origin, Object targetInstance, - String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - List painlessAnnotations) { + public WhitelistInstanceBinding( + String origin, + Object targetInstance, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + List painlessAnnotations + ) { this.origin = Objects.requireNonNull(origin); this.targetInstance = Objects.requireNonNull(targetInstance); @@ -82,9 +87,12 @@ public class WhitelistInstanceBinding { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java index 07af1a9dc7a..6f123198ab4 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistLoader.java @@ -168,14 +168,17 @@ public final class WhitelistLoader { List whitelistStatics = new ArrayList<>(); List whitelistClassBindings = new ArrayList<>(); - // Execute a single pass through the whitelist text files. This will gather all the + // Execute a single pass through the whitelist text files. This will gather all the // constructors, methods, augmented methods, and fields for each whitelisted class. for (String filepath : filepaths) { String line; int number = -1; - try (LineNumberReader reader = new LineNumberReader( - new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) { + try ( + LineNumberReader reader = new LineNumberReader( + new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8) + ) + ) { String parseType = null; String whitelistClassOrigin = null; @@ -200,7 +203,8 @@ public final class WhitelistLoader { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { throw new IllegalArgumentException( - "invalid class definition: failed to parse class opening bracket [" + line + "]"); + "invalid class definition: failed to parse class opening bracket [" + line + "]" + ); } if (parseType != null) { @@ -229,7 +233,8 @@ public final class WhitelistLoader { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { throw new IllegalArgumentException( - "invalid static import definition: failed to parse static import opening bracket [" + line + "]"); + "invalid static import definition: failed to parse static import opening bracket [" + line + "]" + ); } if (parseType != null) { @@ -238,8 +243,8 @@ public final class WhitelistLoader { parseType = "static_import"; - // Handle the end of a definition and reset all previously gathered values. - // Expects the following format: '}' '\n' + // Handle the end of a definition and reset all previously gathered values. 
+ // Expects the following format: '}' '\n' } else if (line.equals("}")) { if (parseType == null) { throw new IllegalArgumentException("invalid definition: extraneous closing bracket"); @@ -248,8 +253,16 @@ public final class WhitelistLoader { // Create a new WhitelistClass with all the previously gathered constructors, methods, // augmented methods, and fields, and add it to the list of whitelisted classes. if ("class".equals(parseType)) { - whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, - whitelistConstructors, whitelistMethods, whitelistFields, classAnnotations)); + whitelistClasses.add( + new WhitelistClass( + whitelistClassOrigin, + javaClassName, + whitelistConstructors, + whitelistMethods, + whitelistFields, + classAnnotations + ) + ); whitelistClassOrigin = null; javaClassName = null; @@ -262,8 +275,9 @@ public final class WhitelistLoader { // Reset the parseType. parseType = null; - // Handle static import definition types. - // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' ( 'from_class' | 'bound_to' ) ID annotations? '\n' + // Handle static import definition types. + // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' ( 'from_class' | 'bound_to' ) ID annotations? + // '\n' } else if ("static_import".equals(parseType)) { // Mark the origin of this parsable object. String origin = "[" + filepath + "]:[" + number + "]"; @@ -273,7 +287,8 @@ public final class WhitelistLoader { if (parameterStartIndex == -1) { throw new IllegalArgumentException( - "illegal static import definition: start of method parameters not found [" + line + "]"); + "illegal static import definition: start of method parameters not found [" + line + "]" + ); } String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+"); @@ -294,11 +309,13 @@ public final class WhitelistLoader { if (parameterEndIndex == -1) { throw new IllegalArgumentException( - "illegal static import definition: end of method parameters not found [" + line + "]"); + "illegal static import definition: end of method parameters not found [" + line + "]" + ); } - String[] canonicalTypeNameParameters = - line.substring(parameterStartIndex + 1, parameterEndIndex).replaceAll("\\s+", "").split(","); + String[] canonicalTypeNameParameters = line.substring(parameterStartIndex + 1, parameterEndIndex) + .replaceAll("\\s+", "") + .split(","); // Handle the case for a method with no parameters. if ("".equals(canonicalTypeNameParameters[0])) { @@ -332,19 +349,39 @@ public final class WhitelistLoader { // Add a static import method or binding depending on the static import type. 
if ("from_class".equals(staticImportType)) { - whitelistStatics.add(new WhitelistMethod(origin, targetJavaClassName, - methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters), - annotations)); + whitelistStatics.add( + new WhitelistMethod( + origin, + targetJavaClassName, + methodName, + returnCanonicalTypeName, + Arrays.asList(canonicalTypeNameParameters), + annotations + ) + ); } else if ("bound_to".equals(staticImportType)) { - whitelistClassBindings.add(new WhitelistClassBinding(origin, targetJavaClassName, - methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters), - annotations)); + whitelistClassBindings.add( + new WhitelistClassBinding( + origin, + targetJavaClassName, + methodName, + returnCanonicalTypeName, + Arrays.asList(canonicalTypeNameParameters), + annotations + ) + ); } else { - throw new IllegalArgumentException("invalid static import definition: " + - "unexpected static import type [" + staticImportType + "] [" + line + "]"); + throw new IllegalArgumentException( + "invalid static import definition: " + + "unexpected static import type [" + + staticImportType + + "] [" + + line + + "]" + ); } - // Handle class definition types. + // Handle class definition types. } else if ("class".equals(parseType)) { // Mark the origin of this parsable object. String origin = "[" + filepath + "]:[" + number + "]"; @@ -357,7 +394,8 @@ public final class WhitelistLoader { if (parameterEndIndex == -1) { throw new IllegalArgumentException( - "illegal constructor definition: end of constructor parameters not found [" + line + "]"); + "illegal constructor definition: end of constructor parameters not found [" + line + "]" + ); } String[] canonicalTypeNameParameters = line.substring(1, parameterEndIndex).replaceAll("\\s+", "").split(","); @@ -370,14 +408,16 @@ public final class WhitelistLoader { // Parse the annotations if they exist. List annotations; int annotationIndex = line.indexOf('@'); - annotations = annotationIndex == -1 ? - Collections.emptyList() : parseWhitelistAnnotations(parsers, line.substring(annotationIndex)); + annotations = annotationIndex == -1 + ? Collections.emptyList() + : parseWhitelistAnnotations(parsers, line.substring(annotationIndex)); - whitelistConstructors.add(new WhitelistConstructor( - origin, Arrays.asList(canonicalTypeNameParameters), annotations)); + whitelistConstructors.add( + new WhitelistConstructor(origin, Arrays.asList(canonicalTypeNameParameters), annotations) + ); - // Handle the case for a method or augmented method definition. - // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' annotations? '\n' + // Handle the case for a method or augmented method definition. + // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' annotations? '\n' } else if (line.contains("(")) { // Parse the tokens prior to the method parameters. 
int parameterStartIndex = line.indexOf('('); @@ -404,11 +444,13 @@ public final class WhitelistLoader { if (parameterEndIndex == -1) { throw new IllegalArgumentException( - "illegal static import definition: end of method parameters not found [" + line + "]"); + "illegal static import definition: end of method parameters not found [" + line + "]" + ); } - String[] canonicalTypeNameParameters = - line.substring(parameterStartIndex + 1, parameterEndIndex).replaceAll("\\s+", "").split(","); + String[] canonicalTypeNameParameters = line.substring(parameterStartIndex + 1, parameterEndIndex) + .replaceAll("\\s+", "") + .split(","); // Handle the case for a method with no parameters. if ("".equals(canonicalTypeNameParameters[0])) { @@ -418,15 +460,23 @@ public final class WhitelistLoader { // Parse the annotations if they exist. List annotations; int annotationIndex = line.indexOf('@'); - annotations = annotationIndex == -1 ? - Collections.emptyList() : parseWhitelistAnnotations(parsers, line.substring(annotationIndex)); + annotations = annotationIndex == -1 + ? Collections.emptyList() + : parseWhitelistAnnotations(parsers, line.substring(annotationIndex)); - whitelistMethods.add(new WhitelistMethod(origin, javaAugmentedClassName, methodName, - returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters), - annotations)); + whitelistMethods.add( + new WhitelistMethod( + origin, + javaAugmentedClassName, + methodName, + returnCanonicalTypeName, + Arrays.asList(canonicalTypeNameParameters), + annotations + ) + ); - // Handle the case for a field definition. - // Expects the following format: ID ID annotations? '\n' + // Handle the case for a field definition. + // Expects the following format: ID ID annotations? '\n' } else { // Parse the annotations if they exist. 
List annotations; @@ -463,17 +513,16 @@ public final class WhitelistLoader { } } - ClassLoader loader = AccessController.doPrivileged((PrivilegedAction)resource::getClassLoader); + ClassLoader loader = AccessController.doPrivileged((PrivilegedAction) resource::getClassLoader); return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList()); } - private static List parseWhitelistAnnotations( - Map parsers, String line) { + private static List parseWhitelistAnnotations(Map parsers, String line) { List annotations; - if ("".equals(line.replaceAll("\\s+",""))) { + if ("".equals(line.replaceAll("\\s+", ""))) { annotations = Collections.emptyList(); } else { line = line.trim(); @@ -524,8 +573,9 @@ public final class WhitelistLoader { String argumentValue = argumentKeyValue[1]; - if (argumentValue.length() < 3 || argumentValue.charAt(0) != '"' || - argumentValue.charAt(argumentValue.length() - 1) != '"') { + if (argumentValue.length() < 3 + || argumentValue.charAt(0) != '"' + || argumentValue.charAt(argumentValue.length() - 1) != '"') { throw new IllegalArgumentException("invalid annotation: expected key=\"value\" [" + line + "]"); } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java index d995b871a8e..2a8e94206e2 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/WhitelistMethod.java @@ -85,9 +85,14 @@ public class WhitelistMethod { * augmentedCanonicalClassName; augmentedCanonicalClassName will be {@code null} unless the method * is augmented as described in the class documentation. 
*/ - public WhitelistMethod(String origin, String augmentedCanonicalClassName, String methodName, - String returnCanonicalTypeName, List canonicalTypeNameParameters, - List painlessAnnotations) { + public WhitelistMethod( + String origin, + String augmentedCanonicalClassName, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + List painlessAnnotations + ) { this.origin = Objects.requireNonNull(origin); this.augmentedCanonicalClassName = augmentedCanonicalClassName; @@ -98,9 +103,12 @@ public class WhitelistMethod { if (painlessAnnotations.isEmpty()) { this.painlessAnnotations = Collections.emptyMap(); } else { - this.painlessAnnotations = Collections.unmodifiableMap(Objects.requireNonNull(painlessAnnotations).stream() + this.painlessAnnotations = Collections.unmodifiableMap( + Objects.requireNonNull(painlessAnnotations) + .stream() .map(painlessAnnotation -> new AbstractMap.SimpleEntry<>(painlessAnnotation.getClass(), painlessAnnotation)) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ); } } } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/InjectConstantAnnotation.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/InjectConstantAnnotation.java index 32a473b3268..5dd8a61c8ac 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/InjectConstantAnnotation.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/InjectConstantAnnotation.java @@ -43,6 +43,7 @@ import java.util.List; public class InjectConstantAnnotation { public static final String NAME = "inject_constant"; public final List injects; + public InjectConstantAnnotation(List injects) { this.injects = Collections.unmodifiableList(injects); } diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java index b3a00fb9dcf..ec270a33632 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/annotation/WhitelistAnnotationParser.java @@ -45,12 +45,12 @@ import java.util.stream.Stream; public interface WhitelistAnnotationParser { Map BASE_ANNOTATION_PARSERS = Collections.unmodifiableMap( - Stream.of( - new AbstractMap.SimpleEntry<>(NoImportAnnotation.NAME, NoImportAnnotationParser.INSTANCE), - new AbstractMap.SimpleEntry<>(DeprecatedAnnotation.NAME, DeprecatedAnnotationParser.INSTANCE), - new AbstractMap.SimpleEntry<>(NonDeterministicAnnotation.NAME, NonDeterministicAnnotationParser.INSTANCE), - new AbstractMap.SimpleEntry<>(InjectConstantAnnotation.NAME, InjectConstantAnnotationParser.INSTANCE) - ).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + Stream.of( + new AbstractMap.SimpleEntry<>(NoImportAnnotation.NAME, NoImportAnnotationParser.INSTANCE), + new AbstractMap.SimpleEntry<>(DeprecatedAnnotation.NAME, DeprecatedAnnotationParser.INSTANCE), + new AbstractMap.SimpleEntry<>(NonDeterministicAnnotation.NAME, NonDeterministicAnnotationParser.INSTANCE), + new AbstractMap.SimpleEntry<>(InjectConstantAnnotation.NAME, InjectConstantAnnotationParser.INSTANCE) + ).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) ); 
Object parse(Map arguments); diff --git a/modules/lang-painless/src/doc/java/org/opensearch/painless/ContextDocGenerator.java b/modules/lang-painless/src/doc/java/org/opensearch/painless/ContextDocGenerator.java index 3083a14cfc8..cc3fcaa828e 100644 --- a/modules/lang-painless/src/doc/java/org/opensearch/painless/ContextDocGenerator.java +++ b/modules/lang-painless/src/doc/java/org/opensearch/painless/ContextDocGenerator.java @@ -116,25 +116,26 @@ public final class ContextDocGenerator { } @SuppressForbidden(reason = "retrieving data from an internal API not exposed as part of the REST client") - private static List getContextInfos() throws IOException { - URLConnection getContextNames = new URL( - "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context").openConnection(); + private static List getContextInfos() throws IOException { + URLConnection getContextNames = new URL("http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context") + .openConnection(); XContentParser parser = JsonXContent.jsonXContent.createParser(null, null, getContextNames.getInputStream()); parser.nextToken(); parser.nextToken(); @SuppressWarnings("unchecked") - List contextNames = (List)(Object)parser.list(); + List contextNames = (List) (Object) parser.list(); parser.close(); - ((HttpURLConnection)getContextNames).disconnect(); + ((HttpURLConnection) getContextNames).disconnect(); List contextInfos = new ArrayList<>(); for (String contextName : contextNames) { URLConnection getContextInfo = new URL( - "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context?context=" + contextName).openConnection(); + "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context?context=" + contextName + ).openConnection(); parser = JsonXContent.jsonXContent.createParser(null, null, getContextInfo.getInputStream()); contextInfos.add(PainlessContextInfo.fromXContent(parser)); - ((HttpURLConnection)getContextInfo).disconnect(); + ((HttpURLConnection) getContextInfo).disconnect(); } contextInfos.sort(Comparator.comparing(PainlessContextInfo::getName)); @@ -159,9 +160,11 @@ public final class ContextDocGenerator { } } - return staticInfoCounts.entrySet().stream().filter( - e -> e.getValue() == contextInfos.size() - ).map(Map.Entry::getKey).collect(Collectors.toSet()); + return staticInfoCounts.entrySet() + .stream() + .filter(e -> e.getValue() == contextInfos.size()) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); } private static List createContextStatics(PainlessContextInfo contextInfo) { @@ -183,9 +186,11 @@ public final class ContextDocGenerator { } } - return classInfoCounts.entrySet().stream().filter( - e -> e.getValue() == contextInfos.size() - ).map(Map.Entry::getKey).collect(Collectors.toSet()); + return classInfoCounts.entrySet() + .stream() + .filter(e -> e.getValue() == contextInfos.size()) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); } @SuppressForbidden(reason = "resolve api docs directory with environment") @@ -216,14 +221,22 @@ public final class ContextDocGenerator { stream.println(); } - private static void printSharedIndexPage(Path sharedDir, Map javaNamesToDisplayNames, - List staticInfos, List classInfos) throws IOException { + private static void printSharedIndexPage( + Path sharedDir, + Map javaNamesToDisplayNames, + List staticInfos, + List classInfos + ) throws IOException { Path sharedIndexPath = sharedDir.resolve("index.asciidoc"); - try (PrintStream sharedIndexStream = new PrintStream( + try ( + PrintStream 
sharedIndexStream = new PrintStream( Files.newOutputStream(sharedIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + false, + StandardCharsets.UTF_8.name() + ) + ) { printAutomatedMessage(sharedIndexStream); @@ -236,14 +249,23 @@ public final class ContextDocGenerator { } } - private static void printContextIndexPage(Path contextDir, Map javaNamesToDisplayNames, - PainlessContextInfo contextInfo, List staticInfos, List classInfos) throws IOException { + private static void printContextIndexPage( + Path contextDir, + Map javaNamesToDisplayNames, + PainlessContextInfo contextInfo, + List staticInfos, + List classInfos + ) throws IOException { Path contextIndexPath = contextDir.resolve("index.asciidoc"); - try (PrintStream contextIndexStream = new PrintStream( + try ( + PrintStream contextIndexStream = new PrintStream( Files.newOutputStream(contextIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + false, + StandardCharsets.UTF_8.name() + ) + ) { printAutomatedMessage(contextIndexStream); @@ -253,31 +275,39 @@ public final class ContextDocGenerator { contextIndexStream.println("The following specialized API is available in the " + getContextName(contextInfo) + " context."); contextIndexStream.println(); contextIndexStream.println( - "* See the <<" + SHARED_HEADER + ", " + SHARED_NAME + " API>> for further API available in all contexts."); + "* See the <<" + SHARED_HEADER + ", " + SHARED_NAME + " API>> for further API available in all contexts." + ); printIndex(contextIndexStream, getContextHeader(contextInfo), javaNamesToDisplayNames, staticInfos, classInfos); } } - private static void printIndex(PrintStream indexStream, String contextHeader, Map javaNamesToDisplayNames, - List staticInfos, List classInfos) { + private static void printIndex( + PrintStream indexStream, + String contextHeader, + Map javaNamesToDisplayNames, + List staticInfos, + List classInfos + ) { String currentPackageName = null; if (staticInfos.isEmpty() == false) { indexStream.println(); indexStream.println("==== Static Methods"); - indexStream.println("The following methods are directly callable without a class/instance qualifier. " + - "Note parameters denoted by a (*) are treated as read-only values."); + indexStream.println( + "The following methods are directly callable without a class/instance qualifier. " + + "Note parameters denoted by a (*) are treated as read-only values." 
+ ); indexStream.println(); for (Object staticInfo : staticInfos) { if (staticInfo instanceof PainlessContextMethodInfo) { - printMethod(indexStream, javaNamesToDisplayNames, false, (PainlessContextMethodInfo)staticInfo); + printMethod(indexStream, javaNamesToDisplayNames, false, (PainlessContextMethodInfo) staticInfo); } else if (staticInfo instanceof PainlessContextClassBindingInfo) { - printClassBinding(indexStream, javaNamesToDisplayNames, (PainlessContextClassBindingInfo)staticInfo); + printClassBinding(indexStream, javaNamesToDisplayNames, (PainlessContextClassBindingInfo) staticInfo); } else if (staticInfo instanceof PainlessContextInstanceBindingInfo) { - printInstanceBinding(indexStream, javaNamesToDisplayNames, (PainlessContextInstanceBindingInfo)staticInfo); + printInstanceBinding(indexStream, javaNamesToDisplayNames, (PainlessContextInstanceBindingInfo) staticInfo); } else { throw new IllegalArgumentException("unexpected static info type"); } @@ -287,8 +317,10 @@ public final class ContextDocGenerator { if (classInfos.isEmpty() == false) { indexStream.println(); indexStream.println("==== Classes By Package"); - indexStream.println("The following classes are available grouped by their respective packages. Click on a class " + - "to view details about the available methods and fields."); + indexStream.println( + "The following classes are available grouped by their respective packages. Click on a class " + + "to view details about the available methods and fields." + ); indexStream.println(); for (PainlessContextClassInfo classInfo : classInfos) { @@ -299,8 +331,14 @@ public final class ContextDocGenerator { indexStream.println(); indexStream.println("==== " + currentPackageName); - indexStream.println("<<" + getPackageHeader(contextHeader, currentPackageName) + ", " + - "Expand details for " + currentPackageName + ">>"); + indexStream.println( + "<<" + + getPackageHeader(contextHeader, currentPackageName) + + ", " + + "Expand details for " + + currentPackageName + + ">>" + ); indexStream.println(); } @@ -315,38 +353,64 @@ public final class ContextDocGenerator { } private static void printSharedPackagesPages( - Path sharedDir, Map javaNamesToDisplayNames, List classInfos) throws IOException { + Path sharedDir, + Map javaNamesToDisplayNames, + List classInfos + ) throws IOException { Path sharedClassesPath = sharedDir.resolve("packages.asciidoc"); - try (PrintStream sharedPackagesStream = new PrintStream( + try ( + PrintStream sharedPackagesStream = new PrintStream( Files.newOutputStream(sharedClassesPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + false, + StandardCharsets.UTF_8.name() + ) + ) { printAutomatedMessage(sharedPackagesStream); printPackages(sharedPackagesStream, SHARED_NAME, SHARED_HEADER, javaNamesToDisplayNames, Collections.emptySet(), classInfos); } } - private static void printContextPackagesPages(Path contextDir, Map javaNamesToDisplayNames, - Set excludes, PainlessContextInfo contextInfo, List classInfos) - throws IOException { + private static void printContextPackagesPages( + Path contextDir, + Map javaNamesToDisplayNames, + Set excludes, + PainlessContextInfo contextInfo, + List classInfos + ) throws IOException { Path contextPackagesPath = contextDir.resolve("packages.asciidoc"); - try (PrintStream contextPackagesStream = new PrintStream( + try ( + PrintStream contextPackagesStream = new PrintStream( Files.newOutputStream(contextPackagesPath, StandardOpenOption.CREATE_NEW, 
StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + false, + StandardCharsets.UTF_8.name() + ) + ) { printAutomatedMessage(contextPackagesStream); - printPackages(contextPackagesStream, - getContextName(contextInfo), getContextHeader(contextInfo), javaNamesToDisplayNames, excludes, classInfos); + printPackages( + contextPackagesStream, + getContextName(contextInfo), + getContextHeader(contextInfo), + javaNamesToDisplayNames, + excludes, + classInfos + ); } } - private static void printPackages(PrintStream packagesStream, String contextName, String contextHeader, - Map javaNamesToDisplayNames, Set excludes, List classInfos) - { + private static void printPackages( + PrintStream packagesStream, + String contextName, + String contextHeader, + Map javaNamesToDisplayNames, + Set excludes, + List classInfos + ) { String currentPackageName = null; @@ -363,8 +427,9 @@ public final class ContextDocGenerator { packagesStream.println(); packagesStream.println("[role=\"exclude\",id=\"" + getPackageHeader(contextHeader, currentPackageName) + "\"]"); packagesStream.println("=== " + contextName + " API for package " + currentPackageName); - packagesStream.println("See the <<" + contextHeader + ", " + contextName + " API>> " + - "for a high-level overview of all packages and classes."); + packagesStream.println( + "See the <<" + contextHeader + ", " + contextName + " API>> " + "for a high-level overview of all packages and classes." + ); } String className = getType(javaNamesToDisplayNames, classInfo.getName()); @@ -398,13 +463,17 @@ public final class ContextDocGenerator { packagesStream.println(); } - private static void printRootIndexPage(Path rootDir, - List contextInfos, Set isSpecialized) throws IOException { + private static void printRootIndexPage(Path rootDir, List contextInfos, Set isSpecialized) + throws IOException { Path rootIndexPath = rootDir.resolve("index.asciidoc"); - try (PrintStream rootIndexStream = new PrintStream( + try ( + PrintStream rootIndexStream = new PrintStream( Files.newOutputStream(rootIndexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + false, + StandardCharsets.UTF_8.name() + ) + ) { printAutomatedMessage(rootIndexStream); @@ -439,8 +508,11 @@ public final class ContextDocGenerator { } private static void printConstructor( - PrintStream stream, Map javaNamesToDisplayNames, - String className, PainlessContextConstructorInfo constructorInfo) { + PrintStream stream, + Map javaNamesToDisplayNames, + String className, + PainlessContextConstructorInfo constructorInfo + ) { stream.print("* "); @@ -452,9 +524,7 @@ public final class ContextDocGenerator { stream.print("("); - for (int parameterIndex = 0; - parameterIndex < constructorInfo.getParameters().size(); - ++parameterIndex) { + for (int parameterIndex = 0; parameterIndex < constructorInfo.getParameters().size(); ++parameterIndex) { stream.print(getType(javaNamesToDisplayNames, constructorInfo.getParameters().get(parameterIndex))); @@ -467,8 +537,11 @@ public final class ContextDocGenerator { } private static void printMethod( - PrintStream stream, Map javaNamesToDisplayNames, - boolean isStatic, PainlessContextMethodInfo methodInfo) { + PrintStream stream, + Map javaNamesToDisplayNames, + boolean isStatic, + PainlessContextMethodInfo methodInfo + ) { stream.print("* " + (isStatic ? 
"static " : "")); stream.print(getType(javaNamesToDisplayNames, methodInfo.getRtn()) + " "); @@ -481,9 +554,7 @@ public final class ContextDocGenerator { stream.print("("); - for (int parameterIndex = 0; - parameterIndex < methodInfo.getParameters().size(); - ++parameterIndex) { + for (int parameterIndex = 0; parameterIndex < methodInfo.getParameters().size(); ++parameterIndex) { stream.print(getType(javaNamesToDisplayNames, methodInfo.getParameters().get(parameterIndex))); @@ -496,7 +567,10 @@ public final class ContextDocGenerator { } private static void printClassBinding( - PrintStream stream, Map javaNamesToDisplayNames, PainlessContextClassBindingInfo classBindingInfo) { + PrintStream stream, + Map javaNamesToDisplayNames, + PainlessContextClassBindingInfo classBindingInfo + ) { stream.print("* " + getType(javaNamesToDisplayNames, classBindingInfo.getRtn()) + " " + classBindingInfo.getName() + "("); @@ -504,7 +578,8 @@ public final class ContextDocGenerator { // temporary fix to not print org.opensearch.script.ScoreScript parameter until // class instance bindings are created and the information is appropriately added to the context info classes if ("org.opensearch.script.ScoreScript".equals( - getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex)))) { + getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex)) + )) { continue; } @@ -523,7 +598,10 @@ public final class ContextDocGenerator { } private static void printInstanceBinding( - PrintStream stream, Map javaNamesToDisplayNames, PainlessContextInstanceBindingInfo instanceBindingInfo) { + PrintStream stream, + Map javaNamesToDisplayNames, + PainlessContextInstanceBindingInfo instanceBindingInfo + ) { stream.print("* " + getType(javaNamesToDisplayNames, instanceBindingInfo.getRtn()) + " " + instanceBindingInfo.getName() + "("); @@ -539,8 +617,11 @@ public final class ContextDocGenerator { } private static void printField( - PrintStream stream, Map javaNamesToDisplayNames, - boolean isStatic, PainlessContextFieldInfo fieldInfo) { + PrintStream stream, + Map javaNamesToDisplayNames, + boolean isStatic, + PainlessContextFieldInfo fieldInfo + ) { stream.print("* " + (isStatic ? 
"static " : "")); stream.print(getType(javaNamesToDisplayNames, fieldInfo.getType()) + " "); @@ -609,9 +690,7 @@ public final class ContextDocGenerator { javaDocLink.append(constructorInfo.getDeclaring().replace('.', '/')); javaDocLink.append(".html#("); - for (int parameterIndex = 0; - parameterIndex < constructorInfo.getParameters().size(); - ++parameterIndex) { + for (int parameterIndex = 0; parameterIndex < constructorInfo.getParameters().size(); ++parameterIndex) { javaDocLink.append(getLinkType(constructorInfo.getParameters().get(parameterIndex))); @@ -634,9 +713,7 @@ public final class ContextDocGenerator { javaDocLink.append(methodInfo.getName()); javaDocLink.append("("); - for (int parameterIndex = 0; - parameterIndex < methodInfo.getParameters().size(); - ++parameterIndex) { + for (int parameterIndex = 0; parameterIndex < methodInfo.getParameters().size(); ++parameterIndex) { javaDocLink.append(getLinkType(methodInfo.getParameters().get(parameterIndex))); @@ -728,21 +805,21 @@ public final class ContextDocGenerator { String sv2; if (si1 instanceof PainlessContextMethodInfo) { - sv1 = ((PainlessContextMethodInfo)si1).getSortValue(); + sv1 = ((PainlessContextMethodInfo) si1).getSortValue(); } else if (si1 instanceof PainlessContextClassBindingInfo) { - sv1 = ((PainlessContextClassBindingInfo)si1).getSortValue(); + sv1 = ((PainlessContextClassBindingInfo) si1).getSortValue(); } else if (si1 instanceof PainlessContextInstanceBindingInfo) { - sv1 = ((PainlessContextInstanceBindingInfo)si1).getSortValue(); + sv1 = ((PainlessContextInstanceBindingInfo) si1).getSortValue(); } else { throw new IllegalArgumentException("unexpected static info type"); } if (si2 instanceof PainlessContextMethodInfo) { - sv2 = ((PainlessContextMethodInfo)si2).getSortValue(); + sv2 = ((PainlessContextMethodInfo) si2).getSortValue(); } else if (si2 instanceof PainlessContextClassBindingInfo) { - sv2 = ((PainlessContextClassBindingInfo)si2).getSortValue(); + sv2 = ((PainlessContextClassBindingInfo) si2).getSortValue(); } else if (si2 instanceof PainlessContextInstanceBindingInfo) { - sv2 = ((PainlessContextInstanceBindingInfo)si2).getSortValue(); + sv2 = ((PainlessContextInstanceBindingInfo) si2).getSortValue(); } else { throw new IllegalArgumentException("unexpected static info type"); } @@ -754,15 +831,24 @@ public final class ContextDocGenerator { } private static List sortClassInfos( - Set classExcludes, List classInfos) { + Set classExcludes, + List classInfos + ) { classInfos = new ArrayList<>(classInfos); - classInfos.removeIf(v -> - "void".equals(v.getName()) || "boolean".equals(v.getName()) || "byte".equals(v.getName()) || - "short".equals(v.getName()) || "char".equals(v.getName()) || "int".equals(v.getName()) || - "long".equals(v.getName()) || "float".equals(v.getName()) || "double".equals(v.getName()) || - "org.opensearch.painless.lookup.def".equals(v.getName()) || - isInternalClass(v.getName()) || classExcludes.contains(v) + classInfos.removeIf( + v -> "void".equals(v.getName()) + || "boolean".equals(v.getName()) + || "byte".equals(v.getName()) + || "short".equals(v.getName()) + || "char".equals(v.getName()) + || "int".equals(v.getName()) + || "long".equals(v.getName()) + || "float".equals(v.getName()) + || "double".equals(v.getName()) + || "org.opensearch.painless.lookup.def".equals(v.getName()) + || isInternalClass(v.getName()) + || classExcludes.contains(v) ); classInfos.sort((c1, c2) -> { @@ -799,8 +885,7 @@ public final class ContextDocGenerator { String className = classInfo.getName(); if 
(classInfo.isImported()) { - javaNamesToDisplayNames.put(className, - className.substring(className.lastIndexOf('.') + 1).replace('$', '.')); + javaNamesToDisplayNames.put(className, className.substring(className.lastIndexOf('.') + 1).replace('$', '.')); } else { javaNamesToDisplayNames.put(className, className.replace('$', '.')); } @@ -810,8 +895,8 @@ public final class ContextDocGenerator { } private static boolean isInternalClass(String javaName) { - return javaName.equals("org.opensearch.script.ScoreScript") || - javaName.equals("org.opensearch.script.ScoreScript$ExplanationHolder"); + return javaName.equals("org.opensearch.script.ScoreScript") + || javaName.equals("org.opensearch.script.ScoreScript$ExplanationHolder"); } private ContextDocGenerator() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/opensearch/painless/AnalyzerCaster.java index 9ce1fd5f79b..e375ff14db6 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/AnalyzerCaster.java @@ -87,7 +87,7 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(def.class, Float.class, explicit); } else if (expected == Double.class) { return PainlessCast.originalTypetoTargetType(def.class, Double.class, explicit); - // TODO: remove this when the transition from Joda to Java datetimes is completed + // TODO: remove this when the transition from Joda to Java datetimes is completed } else if (expected == ZonedDateTime.class) { return PainlessCast.originalTypetoTargetType(def.class, ZonedDateTime.class, explicit); } @@ -95,7 +95,7 @@ public final class AnalyzerCaster { if (expected == char.class && explicit) { return PainlessCast.originalTypetoTargetType(String.class, char.class, true); } - // TODO: remove this when the transition from Joda to Java datetimes is completed + // TODO: remove this when the transition from Joda to Java datetimes is completed } else if (actual == JodaCompatibleZonedDateTime.class) { if (expected == ZonedDateTime.class) { return PainlessCast.originalTypetoTargetType(JodaCompatibleZonedDateTime.class, ZonedDateTime.class, explicit); @@ -412,17 +412,23 @@ public final class AnalyzerCaster { } } - if ( - actual == def.class || - (actual != void.class && expected == def.class) || - expected.isAssignableFrom(actual) || - (actual.isAssignableFrom(expected) && explicit) - ) { + if (actual == def.class + || (actual != void.class && expected == def.class) + || expected.isAssignableFrom(actual) + || (actual.isAssignableFrom(expected) && explicit)) { return PainlessCast.originalTypetoTargetType(actual, expected, explicit); } else { - throw location.createError(new ClassCastException("Cannot cast from " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] to " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "].")); + throw location.createError( + new ClassCastException( + "Cannot cast from " + + "[" + + PainlessLookupUtility.typeToCanonicalTypeName(actual) + + "] to " + + "[" + + PainlessLookupUtility.typeToCanonicalTypeName(expected) + + "]." 
+ ) + ); } } @@ -433,32 +439,43 @@ public final class AnalyzerCaster { if (fsort == tsort) { return constant; } else if (fsort == String.class && tsort == char.class) { - return Utility.StringTochar((String)constant); + return Utility.StringTochar((String) constant); } else if (fsort == char.class && tsort == String.class) { - return Utility.charToString((char)constant); + return Utility.charToString((char) constant); } else if (fsort.isPrimitive() && fsort != boolean.class && tsort.isPrimitive() && tsort != boolean.class) { Number number; if (fsort == char.class) { - number = (int)(char)constant; + number = (int) (char) constant; } else { - number = (Number)constant; + number = (Number) constant; } - if (tsort == byte.class) return number.byteValue(); + if (tsort == byte.class) return number.byteValue(); else if (tsort == short.class) return number.shortValue(); - else if (tsort == char.class) return (char)number.intValue(); + else if (tsort == char.class) return (char) number.intValue(); else if (tsort == int.class) return number.intValue(); else if (tsort == long.class) return number.longValue(); else if (tsort == float.class) return number.floatValue(); else if (tsort == double.class) return number.doubleValue(); else { - throw location.createError(new IllegalStateException("Cannot cast from " + - "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "].")); + throw location.createError( + new IllegalStateException( + "Cannot cast from " + + "[" + + cast.originalType.getCanonicalName() + + "] to [" + + cast.targetType.getCanonicalName() + + "]." + ) + ); } } else { - throw location.createError(new IllegalStateException("Cannot cast from " + - "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "].")); + throw location.createError( + new IllegalStateException( + "Cannot cast from " + "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "]." + ) + ); } } @@ -487,12 +504,16 @@ public final class AnalyzerCaster { if (from0 == long.class || from1 == long.class) { return long.class; - } else if (from0 == int.class || from1 == int.class || - from0 == char.class || from1 == char.class || - from0 == short.class || from1 == short.class || - from0 == byte.class || from1 == byte.class) { - return int.class; - } + } else if (from0 == int.class + || from1 == int.class + || from0 == char.class + || from1 == char.class + || from0 == short.class + || from1 == short.class + || from0 == byte.class + || from1 == byte.class) { + return int.class; + } return null; } @@ -563,8 +584,8 @@ public final class AnalyzerCaster { } } else if (from1 == char.class) { if (from0 == short.class || from0 == byte.class) { - return int.class; - } else { + return int.class; + } else { return null; } } else { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ClassWriter.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ClassWriter.java index 85e98b8d7b9..94bf0cf674e 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ClassWriter.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ClassWriter.java @@ -47,7 +47,7 @@ import java.util.BitSet; * Manages the top level writers for class and possibly * clinit if necessary. */ -public class ClassWriter implements Closeable { +public class ClassWriter implements Closeable { /** * Converts Java reflection modifiers to ASM access constants. 
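
The buildAccess hunk that follows this javadoc is a straight translation of java.lang.reflect.Modifier bits into ASM Opcodes.ACC_* flags, one flag per line. As a rough standalone illustration of that mapping (the class name AccessFlagSketch and the reduced set of flags are assumptions for the example, not part of this patch):

    import java.lang.reflect.Modifier;

    import org.objectweb.asm.Opcodes;

    // Illustrative sketch: mirrors the flag-by-flag translation done by ClassWriter.buildAccess,
    // limited to three of the modifiers handled by the real method.
    public final class AccessFlagSketch {

        static int toAsmAccess(int modifiers, boolean synthetic) {
            int access = synthetic ? Opcodes.ACC_SYNTHETIC : 0;
            if (Modifier.isPublic(modifiers)) access |= Opcodes.ACC_PUBLIC;
            if (Modifier.isStatic(modifiers)) access |= Opcodes.ACC_STATIC;
            if (Modifier.isFinal(modifiers)) access |= Opcodes.ACC_FINAL;
            return access;
        }

        public static void main(String[] args) {
            // String is a public final class, so this prints 0x0011 (ACC_PUBLIC | ACC_FINAL).
            System.out.printf("0x%04x%n", toAsmAccess(String.class.getModifiers(), false));
        }
    }
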
@@ -58,17 +58,17 @@ public class ClassWriter implements Closeable { public static int buildAccess(int modifiers, boolean synthetic) { int access = synthetic ? Opcodes.ACC_SYNTHETIC : 0; - if (Modifier.isFinal(modifiers)) access |= Opcodes.ACC_FINAL; - if (Modifier.isInterface(modifiers)) access |= Opcodes.ACC_INTERFACE; - if (Modifier.isNative(modifiers)) access |= Opcodes.ACC_NATIVE; - if (Modifier.isPrivate(modifiers)) access |= Opcodes.ACC_PRIVATE; - if (Modifier.isProtected(modifiers)) access |= Opcodes.ACC_PROTECTED; - if (Modifier.isPublic(modifiers)) access |= Opcodes.ACC_PUBLIC; - if (Modifier.isStatic(modifiers)) access |= Opcodes.ACC_STATIC; - if (Modifier.isStrict(modifiers)) access |= Opcodes.ACC_STRICT; + if (Modifier.isFinal(modifiers)) access |= Opcodes.ACC_FINAL; + if (Modifier.isInterface(modifiers)) access |= Opcodes.ACC_INTERFACE; + if (Modifier.isNative(modifiers)) access |= Opcodes.ACC_NATIVE; + if (Modifier.isPrivate(modifiers)) access |= Opcodes.ACC_PRIVATE; + if (Modifier.isProtected(modifiers)) access |= Opcodes.ACC_PROTECTED; + if (Modifier.isPublic(modifiers)) access |= Opcodes.ACC_PUBLIC; + if (Modifier.isStatic(modifiers)) access |= Opcodes.ACC_STATIC; + if (Modifier.isStrict(modifiers)) access |= Opcodes.ACC_STRICT; if (Modifier.isSynchronized(modifiers)) access |= Opcodes.ACC_SYNCHRONIZED; - if (Modifier.isTransient(modifiers)) access |= Opcodes.ACC_TRANSIENT; - if (Modifier.isVolatile(modifiers)) access |= Opcodes.ACC_VOLATILE; + if (Modifier.isTransient(modifiers)) access |= Opcodes.ACC_TRANSIENT; + if (Modifier.isVolatile(modifiers)) access |= Opcodes.ACC_VOLATILE; return access; } @@ -79,8 +79,16 @@ public class ClassWriter implements Closeable { protected final org.objectweb.asm.ClassWriter classWriter; protected final ClassVisitor classVisitor; - public ClassWriter(CompilerSettings compilerSettings, BitSet statements, Printer debugStream, - Class baseClass, int classFrames, int classAccess, String className, String[] classInterfaces) { + public ClassWriter( + CompilerSettings compilerSettings, + BitSet statements, + Printer debugStream, + Class baseClass, + int classFrames, + int classAccess, + String className, + String[] classInterfaces + ) { this.compilerSettings = compilerSettings; this.statements = statements; @@ -97,8 +105,14 @@ public class ClassWriter implements Closeable { } classVisitor = visitor; - classVisitor.visit(WriterConstants.CLASS_VERSION, classAccess, className, null, - Type.getType(baseClass).getInternalName(), classInterfaces); + classVisitor.visit( + WriterConstants.CLASS_VERSION, + classAccess, + className, + null, + Type.getType(baseClass).getInternalName(), + classInterfaces + ); } public ClassVisitor getClassVisitor() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java index 901a0b146f4..5d302e2698f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java @@ -236,7 +236,7 @@ final class Compiler { // TODO: Make this phase optional #60156 new DocFieldsPhase().visitClass(root, scriptScope); new PainlessUserTreeToIRTreePhase().visitClass(root, scriptScope); - ClassNode classNode = (ClassNode)scriptScope.getDecoration(root, IRNodeDecoration.class).getIRNode(); + ClassNode classNode = (ClassNode) scriptScope.getDecoration(root, IRNodeDecoration.class).getIRNode(); new 
DefaultStringConcatenationOptimizationPhase().visitClass(classNode, null); new DefaultConstantFoldingOptimizationPhase().visitClass(classNode, null); byte[] bytes = classNode.write(); @@ -271,7 +271,7 @@ final class Compiler { // TODO: Make this phase optional #60156 new DocFieldsPhase().visitClass(root, scriptScope); new PainlessUserTreeToIRTreePhase().visitClass(root, scriptScope); - ClassNode classNode = (ClassNode)scriptScope.getDecoration(root, IRNodeDecoration.class).getIRNode(); + ClassNode classNode = (ClassNode) scriptScope.getDecoration(root, IRNodeDecoration.class).getIRNode(); new DefaultStringConcatenationOptimizationPhase().visitClass(classNode, null); new DefaultConstantFoldingOptimizationPhase().visitClass(classNode, null); classNode.setDebugStream(debugStream); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/opensearch/painless/CompilerSettings.java index 09bdcda59ad..001b58f7ab5 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/CompilerSettings.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/CompilerSettings.java @@ -48,14 +48,22 @@ public final class CompilerSettings { * disabled. If {@code use-limit}, the default, regexes are enabled but limited in complexity according to the * {@code script.painless.regex.limit-factor} setting. */ - public static final Setting REGEX_ENABLED = - new Setting<>("script.painless.regex.enabled", RegexEnabled.LIMITED.value, RegexEnabled::parse, Property.NodeScope); + public static final Setting REGEX_ENABLED = new Setting<>( + "script.painless.regex.enabled", + RegexEnabled.LIMITED.value, + RegexEnabled::parse, + Property.NodeScope + ); /** * How complex can a regex be? This is the number of characters that can be considered expressed as a multiple of string length. */ - public static final Setting REGEX_LIMIT_FACTOR = - Setting.intSetting("script.painless.regex.limit-factor", 6, 1, Property.NodeScope); + public static final Setting REGEX_LIMIT_FACTOR = Setting.intSetting( + "script.painless.regex.limit-factor", + 6, + 1, + Property.NodeScope + ); /** * Constant to be used when specifying the maximum loop counter when compiling a script. @@ -99,7 +107,6 @@ public final class CompilerSettings { */ private RegexEnabled regexesEnabled = RegexEnabled.LIMITED; - /** * How complex can regexes be? Expressed as a multiple of the input string. */ @@ -128,7 +135,7 @@ public final class CompilerSettings { * parsing problems. 
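
The REGEX_ENABLED and REGEX_LIMIT_FACTOR declarations reformatted just above are ordinary node-scoped settings. A minimal sketch of declaring and reading such a setting, reusing the key, default and minimum shown in the hunk and assuming the standard Setting#get(Settings) accessor (the class name RegexLimitFactorSketch and the use of Settings.EMPTY are illustrative only):

    import org.opensearch.common.settings.Setting;
    import org.opensearch.common.settings.Setting.Property;
    import org.opensearch.common.settings.Settings;

    public final class RegexLimitFactorSketch {

        // Same shape as CompilerSettings.REGEX_LIMIT_FACTOR above: key, default, minimum, node scope.
        static final Setting<Integer> REGEX_LIMIT_FACTOR = Setting.intSetting(
            "script.painless.regex.limit-factor",
            6,
            1,
            Property.NodeScope
        );

        public static void main(String[] args) {
            // With no value configured, the declared default (6) comes back.
            System.out.println(REGEX_LIMIT_FACTOR.get(Settings.EMPTY));
        }
    }
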
*/ public boolean isPicky() { - return picky; + return picky; } /** @@ -136,7 +143,7 @@ public final class CompilerSettings { * @see #isPicky */ public void setPicky(boolean picky) { - this.picky = picky; + this.picky = picky; } /** @@ -212,6 +219,7 @@ public final class CompilerSettings { TRUE("true"), FALSE("false"), LIMITED("limited"); + final String value; RegexEnabled(String value) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java index 9b8e45fd089..6fe0888c867 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Def.java @@ -80,38 +80,75 @@ public final class Def { private static final class ArrayLengthHelper { private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); - private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( - Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class, - char[].class, float[].class, double[].class, Object[].class) - .collect(Collectors.toMap(Function.identity(), type -> { - try { - return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( - PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type)); - } catch (ReflectiveOperationException e) { - throw new AssertionError(e); - } - })) + private static final Map, MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( + Stream.of( + boolean[].class, + byte[].class, + short[].class, + int[].class, + long[].class, + char[].class, + float[].class, + double[].class, + Object[].class + ).collect(Collectors.toMap(Function.identity(), type -> { + try { + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( + PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), + "getArrayLength", + MethodType.methodType(int.class, type) + ); + } catch (ReflectiveOperationException e) { + throw new AssertionError(e); + } + })) ); private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class); - static int getArrayLength(final boolean[] array) { return array.length; } - static int getArrayLength(final byte[] array) { return array.length; } - static int getArrayLength(final short[] array) { return array.length; } - static int getArrayLength(final int[] array) { return array.length; } - static int getArrayLength(final long[] array) { return array.length; } - static int getArrayLength(final char[] array) { return array.length; } - static int getArrayLength(final float[] array) { return array.length; } - static int getArrayLength(final double[] array) { return array.length; } - static int getArrayLength(final Object[] array) { return array.length; } + static int getArrayLength(final boolean[] array) { + return array.length; + } + + static int getArrayLength(final byte[] array) { + return array.length; + } + + static int getArrayLength(final short[] array) { + return array.length; + } + + static int getArrayLength(final int[] array) { + return array.length; + } + + static int getArrayLength(final long[] array) { + return array.length; + } + + static int getArrayLength(final char[] array) { + return array.length; + } + + static int getArrayLength(final float[] array) { + return array.length; + } + + static int getArrayLength(final double[] array) { + return array.length; + } + + static int getArrayLength(final Object[] array) { + return array.length; + } static MethodHandle 
arrayLengthGetter(Class arrayType) { if (!arrayType.isArray()) { throw new IllegalArgumentException("type must be an array"); } - return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ? - ARRAY_TYPE_MH_MAPPING.get(arrayType) : - OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); + return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) + ? ARRAY_TYPE_MH_MAPPING.get(arrayType) + : OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); } private ArrayLengthHelper() {} @@ -138,15 +175,21 @@ public final class Def { final MethodHandles.Lookup methodHandlesLookup = MethodHandles.publicLookup(); try { - MAP_GET = methodHandlesLookup.findVirtual(Map.class , "get", MethodType.methodType(Object.class, Object.class)); - MAP_PUT = methodHandlesLookup.findVirtual(Map.class , "put", MethodType.methodType(Object.class, Object.class, Object.class)); + MAP_GET = methodHandlesLookup.findVirtual(Map.class, "get", MethodType.methodType(Object.class, Object.class)); + MAP_PUT = methodHandlesLookup.findVirtual(Map.class, "put", MethodType.methodType(Object.class, Object.class, Object.class)); LIST_GET = methodHandlesLookup.findVirtual(List.class, "get", MethodType.methodType(Object.class, int.class)); LIST_SET = methodHandlesLookup.findVirtual(List.class, "set", MethodType.methodType(Object.class, int.class, Object.class)); ITERATOR = methodHandlesLookup.findVirtual(Iterable.class, "iterator", MethodType.methodType(Iterator.class)); - MAP_INDEX_NORMALIZE = methodHandlesLookup.findStatic(Def.class, "mapIndexNormalize", - MethodType.methodType(Object.class, Map.class, Object.class)); - LIST_INDEX_NORMALIZE = methodHandlesLookup.findStatic(Def.class, "listIndexNormalize", - MethodType.methodType(int.class, List.class, int.class)); + MAP_INDEX_NORMALIZE = methodHandlesLookup.findStatic( + Def.class, + "mapIndexNormalize", + MethodType.methodType(Object.class, Map.class, Object.class) + ); + LIST_INDEX_NORMALIZE = methodHandlesLookup.findStatic( + Def.class, + "listIndexNormalize", + MethodType.methodType(int.class, List.class, int.class) + ); } catch (final ReflectiveOperationException roe) { throw new AssertionError(roe); } @@ -155,8 +198,11 @@ public final class Def { // https://bugs.openjdk.java.net/browse/JDK-8156915 MethodHandle arrayLengthMHFactory; try { - arrayLengthMHFactory = methodHandlesLookup.findStatic(MethodHandles.class, "arrayLength", - MethodType.methodType(MethodHandle.class, Class.class)); + arrayLengthMHFactory = methodHandlesLookup.findStatic( + MethodHandles.class, + "arrayLength", + MethodType.methodType(MethodHandle.class, Class.class) + ); } catch (final ReflectiveOperationException roe) { arrayLengthMHFactory = null; } @@ -205,57 +251,73 @@ public final class Def { * @throws IllegalArgumentException if no matching whitelisted method was found. 
* @throws Throwable if a method reference cannot be converted to an functional interface */ - static MethodHandle lookupMethod(PainlessLookup painlessLookup, FunctionTable functions, Map constants, - MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType, Class receiverClass, String name, Object[] args) - throws Throwable { + static MethodHandle lookupMethod( + PainlessLookup painlessLookup, + FunctionTable functions, + Map constants, + MethodHandles.Lookup methodHandlesLookup, + MethodType callSiteType, + Class receiverClass, + String name, + Object[] args + ) throws Throwable { - String recipeString = (String) args[0]; - int numArguments = callSiteType.parameterCount(); - // simple case: no lambdas - if (recipeString.isEmpty()) { - PainlessMethod painlessMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, numArguments - 1); + String recipeString = (String) args[0]; + int numArguments = callSiteType.parameterCount(); + // simple case: no lambdas + if (recipeString.isEmpty()) { + PainlessMethod painlessMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, numArguments - 1); - if (painlessMethod == null) { - throw new IllegalArgumentException("dynamic method " + - "[" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + (numArguments - 1) + "] not found"); - } + if (painlessMethod == null) { + throw new IllegalArgumentException( + "dynamic method " + + "[" + + typeToCanonicalTypeName(receiverClass) + + ", " + + name + + "/" + + (numArguments - 1) + + "] not found" + ); + } - MethodHandle handle = painlessMethod.methodHandle; - Object[] injections = PainlessLookupUtility.buildInjections(painlessMethod, constants); + MethodHandle handle = painlessMethod.methodHandle; + Object[] injections = PainlessLookupUtility.buildInjections(painlessMethod, constants); - if (injections.length > 0) { - // method handle contains the "this" pointer so start injections at 1 - handle = MethodHandles.insertArguments(handle, 1, injections); - } + if (injections.length > 0) { + // method handle contains the "this" pointer so start injections at 1 + handle = MethodHandles.insertArguments(handle, 1, injections); + } - return handle; - } + return handle; + } - // convert recipe string to a bitset for convenience (the code below should be refactored...) - BitSet lambdaArgs = new BitSet(recipeString.length()); - for (int i = 0; i < recipeString.length(); i++) { - lambdaArgs.set(recipeString.charAt(i)); - } + // convert recipe string to a bitset for convenience (the code below should be refactored...) + BitSet lambdaArgs = new BitSet(recipeString.length()); + for (int i = 0; i < recipeString.length(); i++) { + lambdaArgs.set(recipeString.charAt(i)); + } - // otherwise: first we have to compute the "real" arity. This is because we have extra arguments: - // e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i). - int arity = callSiteType.parameterCount() - 1; - int upTo = 1; - for (int i = 1; i < numArguments; i++) { - if (lambdaArgs.get(i - 1)) { - String signature = (String) args[upTo++]; - int numCaptures = Integer.parseInt(signature.substring(signature.indexOf(',')+1)); - arity -= numCaptures; - } - } + // otherwise: first we have to compute the "real" arity. This is because we have extra arguments: + // e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i). 
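
The comments in this hunk describe the recipe bookkeeping: the recipe string marks which argument positions hold method references, each marked position contributes a "type.call,numCaptures" signature, and the real arity is the call-site parameter count minus one for the receiver slot minus all captured values. A small self-contained sketch of that computation, with made-up recipe and signature values (RecipeAritySketch and realArity are names invented for the example, outside the real Def.lookupMethod plumbing):

    import java.util.BitSet;
    import java.util.List;

    public final class RecipeAritySketch {

        // recipe: each character's numeric value is an argument position holding a functional reference.
        // signatures: one "type.call,numCaptures" entry per marked position.
        static int realArity(String recipe, int callSiteParameterCount, List<String> signatures) {
            BitSet lambdaArgs = new BitSet();
            for (int i = 0; i < recipe.length(); i++) {
                lambdaArgs.set(recipe.charAt(i));
            }
            int arity = callSiteParameterCount - 1;   // drop the receiver slot
            int upTo = 0;
            for (int i = 1; i < callSiteParameterCount; i++) {
                if (lambdaArgs.get(i - 1)) {
                    String signature = signatures.get(upTo++);
                    int numCaptures = Integer.parseInt(signature.substring(signature.indexOf(',') + 1));
                    arity -= numCaptures;             // captured values are extra call-site parameters
                }
            }
            return arity;
        }

        public static void main(String[] args) {
            // Hypothetical call site: (receiver, referencePlaceholder, capture1, capture2) -> 4 parameters,
            // one reference at position 0 capturing two values: 4 - 1 - 2 = 1 real argument.
            System.out.println(realArity("\u0000", 4, List.of("Sjava.util.Comparator.compare,2")));
        }
    }
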
+ int arity = callSiteType.parameterCount() - 1; + int upTo = 1; + for (int i = 1; i < numArguments; i++) { + if (lambdaArgs.get(i - 1)) { + String signature = (String) args[upTo++]; + int numCaptures = Integer.parseInt(signature.substring(signature.indexOf(',') + 1)); + arity -= numCaptures; + } + } - // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters). - // based on these we can finally link any remaining lambdas that were deferred. - PainlessMethod method = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity); + // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters). + // based on these we can finally link any remaining lambdas that were deferred. + PainlessMethod method = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity); if (method == null) { throw new IllegalArgumentException( - "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found"); + "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found" + ); } MethodHandle handle = method.methodHandle; @@ -266,74 +328,83 @@ public final class Def { handle = MethodHandles.insertArguments(handle, 1, injections); } - int replaced = 0; - upTo = 1; - for (int i = 1; i < numArguments; i++) { - // its a functional reference, replace the argument with an impl - if (lambdaArgs.get(i - 1)) { - // decode signature of form 'type.call,2' - String signature = (String) args[upTo++]; - int separator = signature.lastIndexOf('.'); - int separator2 = signature.indexOf(','); - String type = signature.substring(1, separator); - String call = signature.substring(separator+1, separator2); - int numCaptures = Integer.parseInt(signature.substring(separator2+1)); - MethodHandle filter; - Class interfaceType = method.typeParameters.get(i - 1 - replaced); - if (signature.charAt(0) == 'S') { - // the implementation is strongly typed, now that we know the interface type, - // we have everything. - filter = lookupReferenceInternal(painlessLookup, - functions, - constants, - methodHandlesLookup, - interfaceType, - type, - call, - numCaptures - ); - } else if (signature.charAt(0) == 'D') { - // the interface type is now known, but we need to get the implementation. - // this is dynamically based on the receiver type (and cached separately, underneath - // this cache). 
It won't blow up since we never nest here (just references) - Class[] captures = new Class[numCaptures]; - for (int capture = 0; capture < captures.length; capture++) { - captures[capture] = callSiteType.parameterType(i + 1 + capture); - } - MethodType nestedType = MethodType.methodType(interfaceType, captures); - CallSite nested = DefBootstrap.bootstrap(painlessLookup, - functions, - constants, - methodHandlesLookup, - call, - nestedType, - 0, - DefBootstrap.REFERENCE, - PainlessLookupUtility.typeToCanonicalTypeName(interfaceType)); - filter = nested.dynamicInvoker(); - } else { - throw new AssertionError(); - } - // the filter now ignores the signature (placeholder) on the stack - filter = MethodHandles.dropArguments(filter, 0, String.class); - handle = MethodHandles.collectArguments(handle, i, filter); - i += numCaptures; - replaced += numCaptures; - } - } + int replaced = 0; + upTo = 1; + for (int i = 1; i < numArguments; i++) { + // its a functional reference, replace the argument with an impl + if (lambdaArgs.get(i - 1)) { + // decode signature of form 'type.call,2' + String signature = (String) args[upTo++]; + int separator = signature.lastIndexOf('.'); + int separator2 = signature.indexOf(','); + String type = signature.substring(1, separator); + String call = signature.substring(separator + 1, separator2); + int numCaptures = Integer.parseInt(signature.substring(separator2 + 1)); + MethodHandle filter; + Class interfaceType = method.typeParameters.get(i - 1 - replaced); + if (signature.charAt(0) == 'S') { + // the implementation is strongly typed, now that we know the interface type, + // we have everything. + filter = lookupReferenceInternal( + painlessLookup, + functions, + constants, + methodHandlesLookup, + interfaceType, + type, + call, + numCaptures + ); + } else if (signature.charAt(0) == 'D') { + // the interface type is now known, but we need to get the implementation. + // this is dynamically based on the receiver type (and cached separately, underneath + // this cache). It won't blow up since we never nest here (just references) + Class[] captures = new Class[numCaptures]; + for (int capture = 0; capture < captures.length; capture++) { + captures[capture] = callSiteType.parameterType(i + 1 + capture); + } + MethodType nestedType = MethodType.methodType(interfaceType, captures); + CallSite nested = DefBootstrap.bootstrap( + painlessLookup, + functions, + constants, + methodHandlesLookup, + call, + nestedType, + 0, + DefBootstrap.REFERENCE, + PainlessLookupUtility.typeToCanonicalTypeName(interfaceType) + ); + filter = nested.dynamicInvoker(); + } else { + throw new AssertionError(); + } + // the filter now ignores the signature (placeholder) on the stack + filter = MethodHandles.dropArguments(filter, 0, String.class); + handle = MethodHandles.collectArguments(handle, i, filter); + i += numCaptures; + replaced += numCaptures; + } + } - return handle; - } + return handle; + } - /** - * Returns an implementation of interfaceClass that calls receiverClass.name - *

- * This is just like LambdaMetaFactory, only with a dynamic type. The interface type is known, - * so we simply need to lookup the matching implementation method based on receiver type. - */ - static MethodHandle lookupReference(PainlessLookup painlessLookup, FunctionTable functions, Map constants, - MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class receiverClass, String name) - throws Throwable { + /** + * Returns an implementation of interfaceClass that calls receiverClass.name + *

+ * This is just like LambdaMetaFactory, only with a dynamic type. The interface type is known, + * so we simply need to lookup the matching implementation method based on receiver type. + */ + static MethodHandle lookupReference( + PainlessLookup painlessLookup, + FunctionTable functions, + Map constants, + MethodHandles.Lookup methodHandlesLookup, + String interfaceClass, + Class receiverClass, + String name + ) throws Throwable { Class interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass); if (interfaceType == null) { @@ -347,36 +418,50 @@ public final class Def { PainlessMethod implMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity); if (implMethod == null) { throw new IllegalArgumentException( - "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found"); + "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found" + ); } - return lookupReferenceInternal(painlessLookup, functions, constants, - methodHandlesLookup, interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass), - implMethod.javaMethod.getName(), 1); - } + return lookupReferenceInternal( + painlessLookup, + functions, + constants, + methodHandlesLookup, + interfaceType, + PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass), + implMethod.javaMethod.getName(), + 1 + ); + } - /** Returns a method handle to an implementation of clazz, given method reference signature. */ + /** Returns a method handle to an implementation of clazz, given method reference signature. */ private static MethodHandle lookupReferenceInternal( - PainlessLookup painlessLookup, FunctionTable functions, Map constants, - MethodHandles.Lookup methodHandlesLookup, Class clazz, String type, String call, int captures - ) throws Throwable { + PainlessLookup painlessLookup, + FunctionTable functions, + Map constants, + MethodHandles.Lookup methodHandlesLookup, + Class clazz, + String type, + String call, + int captures + ) throws Throwable { final FunctionRef ref = FunctionRef.create(painlessLookup, functions, null, clazz, type, call, captures, constants); final CallSite callSite = LambdaBootstrap.lambdaBootstrap( - methodHandlesLookup, - ref.interfaceMethodName, - ref.factoryMethodType, - ref.interfaceMethodType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateMethodType, - ref.isDelegateInterface ? 1 : 0, - ref.isDelegateAugmented ? 1 : 0, - ref.delegateInjections + methodHandlesLookup, + ref.interfaceMethodName, + ref.factoryMethodType, + ref.interfaceMethodType, + ref.delegateClassName, + ref.delegateInvokeType, + ref.delegateMethodName, + ref.delegateMethodType, + ref.isDelegateInterface ? 1 : 0, + ref.isDelegateAugmented ? 
1 : 0, + ref.delegateInjections ); return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, ref.factoryMethodType.parameterArray())); - } + } /** * Looks up handle for a dynamic field getter (field load) @@ -432,8 +517,7 @@ public final class Def { } } - throw new IllegalArgumentException( - "dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); + throw new IllegalArgumentException("dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); } /** @@ -485,8 +569,7 @@ public final class Def { } } - throw new IllegalArgumentException( - "dynamic setter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); + throw new IllegalArgumentException("dynamic setter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found"); } /** @@ -505,8 +588,9 @@ public final class Def { } else if (List.class.isAssignableFrom(receiverClass)) { return LIST_INDEX_NORMALIZE; } - throw new IllegalArgumentException("Attempting to address a non-array-like type " + - "[" + receiverClass.getCanonicalName() + "] as an array."); + throw new IllegalArgumentException( + "Attempting to address a non-array-like type " + "[" + receiverClass.getCanonicalName() + "] as an array." + ); } /** @@ -524,8 +608,9 @@ public final class Def { } else if (List.class.isAssignableFrom(receiverClass)) { return LIST_SET; } - throw new IllegalArgumentException("Attempting to address a non-array type " + - "[" + receiverClass.getCanonicalName() + "] as an array."); + throw new IllegalArgumentException( + "Attempting to address a non-array type " + "[" + receiverClass.getCanonicalName() + "] as an array." + ); } /** @@ -543,8 +628,9 @@ public final class Def { } else if (List.class.isAssignableFrom(receiverClass)) { return LIST_GET; } - throw new IllegalArgumentException("Attempting to address a non-array type " + - "[" + receiverClass.getCanonicalName() + "] as an array."); + throw new IllegalArgumentException( + "Attempting to address a non-array type " + "[" + receiverClass.getCanonicalName() + "] as an array." 
+ ); } /** Helper class for isolating MethodHandles and methods to get iterators over arrays @@ -555,17 +641,28 @@ public final class Def { private static final class ArrayIteratorHelper { private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); - private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( - Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class, - char[].class, float[].class, double[].class, Object[].class) - .collect(Collectors.toMap(Function.identity(), type -> { - try { - return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( - PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), "iterator", MethodType.methodType(Iterator.class, type)); - } catch (ReflectiveOperationException e) { - throw new AssertionError(e); - } - })) + private static final Map, MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( + Stream.of( + boolean[].class, + byte[].class, + short[].class, + int[].class, + long[].class, + char[].class, + float[].class, + double[].class, + Object[].class + ).collect(Collectors.toMap(Function.identity(), type -> { + try { + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( + PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), + "iterator", + MethodType.methodType(Iterator.class, type) + ); + } catch (ReflectiveOperationException e) { + throw new AssertionError(e); + } + })) ); private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class); @@ -573,64 +670,144 @@ public final class Def { static Iterator iterator(final boolean[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Boolean next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Boolean next() { + return array[index++]; + } }; } + static Iterator iterator(final byte[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Byte next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Byte next() { + return array[index++]; + } }; } + static Iterator iterator(final short[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Short next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Short next() { + return array[index++]; + } }; } + static Iterator iterator(final int[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Integer next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Integer next() { + return array[index++]; + } }; } + static Iterator iterator(final long[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Long next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Long next() { + return array[index++]; + } }; } + static Iterator iterator(final char[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < 
array.length; } - @Override public Character next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Character next() { + return array[index++]; + } }; } + static Iterator iterator(final float[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Float next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Float next() { + return array[index++]; + } }; } + static Iterator iterator(final double[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Double next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Double next() { + return array[index++]; + } }; } + static Iterator iterator(final Object[] array) { return new Iterator() { int index = 0; - @Override public boolean hasNext() { return index < array.length; } - @Override public Object next() { return array[index++]; } + + @Override + public boolean hasNext() { + return index < array.length; + } + + @Override + public Object next() { + return array[index++]; + } }; } @@ -638,13 +815,14 @@ public final class Def { if (!arrayType.isArray()) { throw new IllegalArgumentException("type must be an array"); } - return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ? - ARRAY_TYPE_MH_MAPPING.get(arrayType) : - OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); + return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) + ? ARRAY_TYPE_MH_MAPPING.get(arrayType) + : OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); } private ArrayIteratorHelper() {} } + /** * Returns a method handle to do iteration (for enhanced for loop) * @param receiverClass Class of the array to load the value from @@ -664,255 +842,301 @@ public final class Def { public static boolean defToboolean(final Object value) { if (value instanceof Boolean) { - return (boolean)value; + return (boolean) value; } else { - throw new ClassCastException("cannot cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - boolean.class.getCanonicalName()); + throw new ClassCastException( + "cannot cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + boolean.class.getCanonicalName() + ); } } public static byte defTobyteImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - byte.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + byte.class.getCanonicalName() + ); } } public static short defToshortImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) value; } else if (value instanceof Short) { - return (short)value; + return (short) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - 
short.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + short.class.getCanonicalName() + ); } } public static char defTocharImplicit(final Object value) { if (value instanceof Character) { - return (char)value; + return (char) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - char.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + char.class.getCanonicalName() + ); } } public static int defTointImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) value; } else if (value instanceof Short) { - return (short)value; + return (short) value; } else if (value instanceof Character) { - return (char)value; + return (char) value; } else if (value instanceof Integer) { - return (int)value; + return (int) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - int.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + int.class.getCanonicalName() + ); } } public static long defTolongImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) value; } else if (value instanceof Short) { - return (short)value; + return (short) value; } else if (value instanceof Character) { - return (char)value; + return (char) value; } else if (value instanceof Integer) { - return (int)value; + return (int) value; } else if (value instanceof Long) { - return (long)value; + return (long) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - long.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + long.class.getCanonicalName() + ); } } public static float defTofloatImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) value; } else if (value instanceof Short) { - return (short)value; + return (short) value; } else if (value instanceof Character) { - return (char)value; + return (char) value; } else if (value instanceof Integer) { - return (int)value; + return (int) value; } else if (value instanceof Long) { - return (long)value; + return (long) value; } else if (value instanceof Float) { - return (float)value; + return (float) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - float.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + float.class.getCanonicalName() + ); } } public static double defTodoubleImplicit(final Object value) { if (value instanceof Byte) { - return (byte)value; + return (byte) 
value; } else if (value instanceof Short) { - return (short)value; + return (short) value; } else if (value instanceof Character) { - return (char)value; + return (char) value; } else if (value instanceof Integer) { - return (int)value; + return (int) value; } else if (value instanceof Long) { - return (long)value; + return (long) value; } else if (value instanceof Float) { - return (float)value; + return (float) value; } else if (value instanceof Double) { - return (double)value; + return (double) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - double.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + double.class.getCanonicalName() + ); } } public static byte defTobyteExplicit(final Object value) { if (value instanceof Character) { - return (byte)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).byteValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - byte.class.getCanonicalName()); - } + return (byte) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).byteValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + byte.class.getCanonicalName() + ); + } } public static short defToshortExplicit(final Object value) { if (value instanceof Character) { - return (short)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).shortValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - short.class.getCanonicalName()); - } + return (short) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).shortValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + short.class.getCanonicalName() + ); + } } public static char defTocharExplicit(final Object value) { if (value instanceof String) { - return Utility.StringTochar((String)value); + return Utility.StringTochar((String) value); } else if (value instanceof Character) { - return (char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return (char)((Number)value).intValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + 
PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - char.class.getCanonicalName()); - } + return (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return (char) ((Number) value).intValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + char.class.getCanonicalName() + ); + } } public static int defTointExplicit(final Object value) { if (value instanceof Character) { - return (char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).intValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - int.class.getCanonicalName()); - } + return (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).intValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + int.class.getCanonicalName() + ); + } } public static long defTolongExplicit(final Object value) { if (value instanceof Character) { - return (char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).longValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - long.class.getCanonicalName()); - } + return (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).longValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + long.class.getCanonicalName() + ); + } } public static float defTofloatExplicit(final Object value) { if (value instanceof Character) { - return (char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).floatValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "float [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - byte.class.getCanonicalName()); - } + return (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).floatValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "float [" + + 
PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + byte.class.getCanonicalName() + ); + } } public static double defTodoubleExplicit(final Object value) { if (value instanceof Character) { - return (char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).doubleValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - byte.class.getCanonicalName()); - } + return (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).doubleValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + byte.class.getCanonicalName() + ); + } } // Conversion methods for def to boxed types. @@ -921,11 +1145,15 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Boolean) { - return (Boolean)value; + return (Boolean) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Boolean.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Boolean.class.getCanonicalName() + ); } } @@ -933,11 +1161,15 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (Byte)value; + return (Byte) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Byte.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Byte.class.getCanonicalName() + ); } } @@ -945,13 +1177,17 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (short)(byte)value; + return (short) (byte) value; } else if (value instanceof Short) { - return (Short)value; + return (Short) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Short.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Short.class.getCanonicalName() + ); } } @@ -959,11 +1195,15 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Character) { - return (Character)value; + return (Character) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Character.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + 
PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Character.class.getCanonicalName() + ); } } @@ -971,17 +1211,21 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (int)(byte)value; + return (int) (byte) value; } else if (value instanceof Short) { - return (int)(short)value; + return (int) (short) value; } else if (value instanceof Character) { - return (int)(char)value; + return (int) (char) value; } else if (value instanceof Integer) { - return (Integer)value; + return (Integer) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Integer.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Integer.class.getCanonicalName() + ); } } @@ -989,19 +1233,23 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (long)(byte)value; + return (long) (byte) value; } else if (value instanceof Short) { - return (long)(short)value; + return (long) (short) value; } else if (value instanceof Character) { - return (long)(char)value; + return (long) (char) value; } else if (value instanceof Integer) { - return (long)(int)value; + return (long) (int) value; } else if (value instanceof Long) { - return (Long)value; + return (Long) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Long.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Long.class.getCanonicalName() + ); } } @@ -1009,21 +1257,25 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (float)(byte)value; + return (float) (byte) value; } else if (value instanceof Short) { - return (float)(short)value; + return (float) (short) value; } else if (value instanceof Character) { - return (float)(char)value; + return (float) (char) value; } else if (value instanceof Integer) { - return (float)(int)value; + return (float) (int) value; } else if (value instanceof Long) { - return (float)(long)value; + return (float) (long) value; } else if (value instanceof Float) { - return (Float)value; + return (Float) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Float.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Float.class.getCanonicalName() + ); } } @@ -1031,23 +1283,27 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Byte) { - return (double)(byte)value; + return (double) (byte) value; } else if (value instanceof Short) { - return (double)(short)value; + return (double) (short) value; } else if (value instanceof Character) { - return (double)(char)value; + return (double) (char) value; } else if (value instanceof Integer) { - return (double)(int)value; + return (double) (int) value; } else if (value instanceof 
Long) { - return (double)(long)value; + return (double) (long) value; } else if (value instanceof Float) { - return (double)(float)value; + return (double) (float) value; } else if (value instanceof Double) { return (Double) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Double.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Double.class.getCanonicalName() + ); } } @@ -1055,160 +1311,178 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Character) { - return (byte)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).byteValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Byte.class.getCanonicalName()); - } + return (byte) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).byteValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Byte.class.getCanonicalName() + ); + } } public static Short defToShortExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof Character) { - return (short)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).shortValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Short.class.getCanonicalName()); - } + return (short) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).shortValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Short.class.getCanonicalName() + ); + } } public static Character defToCharacterExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof String) { - return Utility.StringTochar((String)value); + return Utility.StringTochar((String) value); } else if (value instanceof Character) { - return (Character)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return (char)((Number)value).intValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Character.class.getCanonicalName()); - } + return (Character) value; + } 
else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return (char) ((Number) value).intValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Character.class.getCanonicalName() + ); + } } public static Integer defToIntegerExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof Character) { - return (int)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).intValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Integer.class.getCanonicalName()); - } + return (int) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).intValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Integer.class.getCanonicalName() + ); + } } public static Long defToLongExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof Character) { - return (long)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).longValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Long.class.getCanonicalName()); - } + return (long) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).longValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Long.class.getCanonicalName() + ); + } } public static Float defToFloatExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof Character) { - return (float)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).floatValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Float.class.getCanonicalName()); - } + return (float) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).floatValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + 
PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Float.class.getCanonicalName() + ); + } } public static Double defToDoubleExplicit(final Object value) { if (value == null) { return null; } else if (value instanceof Character) { - return (double)(char)value; - } else if ( - value instanceof Byte || - value instanceof Short || - value instanceof Integer || - value instanceof Long || - value instanceof Float || - value instanceof Double - ) { - return ((Number)value).doubleValue(); - } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - Double.class.getCanonicalName()); - } + return (double) (char) value; + } else if (value instanceof Byte + || value instanceof Short + || value instanceof Integer + || value instanceof Long + || value instanceof Float + || value instanceof Double) { + return ((Number) value).doubleValue(); + } else { + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + Double.class.getCanonicalName() + ); + } } public static String defToStringImplicit(final Object value) { if (value == null) { return null; } else if (value instanceof String) { - return (String)value; + return (String) value; } else { - throw new ClassCastException("cannot implicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - String.class.getCanonicalName()); + throw new ClassCastException( + "cannot implicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + String.class.getCanonicalName() + ); } } @@ -1216,23 +1490,27 @@ public final class Def { if (value == null) { return null; } else if (value instanceof Character) { - return Utility.charToString((char)value); + return Utility.charToString((char) value); } else if (value instanceof String) { - return (String)value; + return (String) value; } else { - throw new ClassCastException("cannot explicitly cast " + - "def [" + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + "] to " + - String.class.getCanonicalName()); + throw new ClassCastException( + "cannot explicitly cast " + + "def [" + + PainlessLookupUtility.typeToUnboxedType(value.getClass()).getCanonicalName() + + "] to " + + String.class.getCanonicalName() + ); } } // TODO: remove this when the transition from Joda to Java datetimes is completed public static ZonedDateTime defToZonedDateTime(final Object value) { if (value instanceof JodaCompatibleZonedDateTime) { - return ((JodaCompatibleZonedDateTime)value).getZonedDateTime(); + return ((JodaCompatibleZonedDateTime) value).getZonedDateTime(); } - return (ZonedDateTime)value; + return (ZonedDateTime) value; } /** @@ -1256,38 +1534,75 @@ public final class Def { private static final class ArrayIndexNormalizeHelper { private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); - private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( - Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class, - char[].class, float[].class, double[].class, Object[].class) - .collect(Collectors.toMap(Function.identity(), type -> { - try { - return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), 
"normalizeIndex", - MethodType.methodType(int.class, type, int.class)); - } catch (ReflectiveOperationException e) { - throw new AssertionError(e); - } - })) + private static final Map, MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( + Stream.of( + boolean[].class, + byte[].class, + short[].class, + int[].class, + long[].class, + char[].class, + float[].class, + double[].class, + Object[].class + ).collect(Collectors.toMap(Function.identity(), type -> { + try { + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( + PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), + "normalizeIndex", + MethodType.methodType(int.class, type, int.class) + ); + } catch (ReflectiveOperationException e) { + throw new AssertionError(e); + } + })) ); private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class); - static int normalizeIndex(final boolean[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final byte[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final short[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final int[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final long[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final char[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final float[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final double[] array, final int index) { return index >= 0 ? index : index + array.length; } - static int normalizeIndex(final Object[] array, final int index) { return index >= 0 ? index : index + array.length; } + static int normalizeIndex(final boolean[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final byte[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final short[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final int[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final long[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final char[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final float[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final double[] array, final int index) { + return index >= 0 ? index : index + array.length; + } + + static int normalizeIndex(final Object[] array, final int index) { + return index >= 0 ? index : index + array.length; + } static MethodHandle arrayIndexNormalizer(Class arrayType) { if (!arrayType.isArray()) { throw new IllegalArgumentException("type must be an array"); } - return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ? - ARRAY_TYPE_MH_MAPPING.get(arrayType) : - OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); + return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) + ? 
ARRAY_TYPE_MH_MAPPING.get(arrayType) + : OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType)); } private ArrayIndexNormalizeHelper() {} diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java index e4e620ac7f7..97e2b6f2466 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/DefBootstrap.java @@ -128,8 +128,17 @@ public final class DefBootstrap { private final Object[] args; int depth; // pkg-protected for testing - PIC(PainlessLookup painlessLookup, FunctionTable functions, Map constants, - MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) { + PIC( + PainlessLookup painlessLookup, + FunctionTable functions, + Map constants, + MethodHandles.Lookup methodHandlesLookup, + String name, + MethodType type, + int initialDepth, + int flavor, + Object[] args + ) { super(type); if (type.parameterType(0) != Object.class) { throw new BootstrapMethodError("The receiver type (1st arg) of invokedynamic descriptor must be Object."); @@ -143,9 +152,7 @@ public final class DefBootstrap { this.args = args; this.depth = initialDepth; - MethodHandle fallback = FALLBACK.bindTo(this) - .asCollector(Object[].class, type.parameterCount()) - .asType(type); + MethodHandle fallback = FALLBACK.bindTo(this).asCollector(Object[].class, type.parameterCount()).asType(type); setTarget(fallback); } @@ -162,7 +169,7 @@ public final class DefBootstrap { * Does a slow lookup against the whitelist. */ private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { - switch(flavor) { + switch (flavor) { case METHOD_CALL: return Def.lookupMethod(painlessLookup, functions, constants, methodHandlesLookup, type(), receiver, name, args); case LOAD: @@ -179,7 +186,8 @@ public final class DefBootstrap { return Def.lookupReference(painlessLookup, functions, constants, methodHandlesLookup, (String) args[0], receiver, name); case INDEX_NORMALIZE: return Def.lookupIndexNormalize(receiver); - default: throw new AssertionError(); + default: + throw new AssertionError(); } } @@ -201,8 +209,7 @@ public final class DefBootstrap { } } }; - return MethodHandles.foldArguments(MethodHandles.exactInvoker(type), - MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache)); + return MethodHandles.foldArguments(MethodHandles.exactInvoker(type), MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache)); } /** @@ -239,14 +246,26 @@ public final class DefBootstrap { final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); final MethodHandles.Lookup publicMethodHandlesLookup = MethodHandles.publicLookup(); try { - CHECK_CLASS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkClass", - MethodType.methodType(boolean.class, Class.class, Object.class)); - FALLBACK = methodHandlesLookup.findVirtual(methodHandlesLookup.lookupClass(), "fallback", - MethodType.methodType(Object.class, Object[].class)); - MethodHandle mh = publicMethodHandlesLookup.findVirtual(ClassValue.class, "get", - MethodType.methodType(Object.class, Class.class)); - mh = MethodHandles.filterArguments(mh, 1, - publicMethodHandlesLookup.findVirtual(Object.class, "getClass", MethodType.methodType(Class.class))); + CHECK_CLASS = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "checkClass", + MethodType.methodType(boolean.class, 
Class.class, Object.class) + ); + FALLBACK = methodHandlesLookup.findVirtual( + methodHandlesLookup.lookupClass(), + "fallback", + MethodType.methodType(Object.class, Object[].class) + ); + MethodHandle mh = publicMethodHandlesLookup.findVirtual( + ClassValue.class, + "get", + MethodType.methodType(Object.class, Class.class) + ); + mh = MethodHandles.filterArguments( + mh, + 1, + publicMethodHandlesLookup.findVirtual(Object.class, "getClass", MethodType.methodType(Class.class)) + ); MEGAMORPHIC_LOOKUP = mh.asType(mh.type().changeReturnType(MethodHandle.class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); @@ -273,9 +292,7 @@ public final class DefBootstrap { initialized = true; } - MethodHandle fallback = FALLBACK.bindTo(this) - .asCollector(Object[].class, type.parameterCount()) - .asType(type); + MethodHandle fallback = FALLBACK.bindTo(this).asCollector(Object[].class, type.parameterCount()).asType(type); setTarget(fallback); } @@ -284,7 +301,7 @@ public final class DefBootstrap { * Does a slow lookup for the operator */ private MethodHandle lookup(Object[] args) throws Throwable { - switch(flavor) { + switch (flavor) { case UNARY_OPERATOR: case SHIFT_OPERATOR: // shifts are treated as unary, as java allows long arguments without a cast (but bits are ignored) @@ -307,7 +324,8 @@ public final class DefBootstrap { } return binary; } - default: throw new AssertionError(); + default: + throw new AssertionError(); } } @@ -355,35 +373,31 @@ public final class DefBootstrap { if (type.parameterType(1) != Object.class) { // case 1: only the receiver is unknown, just check that MethodHandle unaryTest = CHECK_LHS.bindTo(clazz0); - test = unaryTest.asType(unaryTest.type() - .changeParameterType(0, type.parameterType(0))); + test = unaryTest.asType(unaryTest.type().changeParameterType(0, type.parameterType(0))); } else if (type.parameterType(0) != Object.class) { // case 2: only the argument is unknown, just check that MethodHandle unaryTest = CHECK_RHS.bindTo(clazz0).bindTo(clazz1); - test = unaryTest.asType(unaryTest.type() - .changeParameterType(0, type.parameterType(0)) - .changeParameterType(1, type.parameterType(1))); + test = unaryTest.asType( + unaryTest.type().changeParameterType(0, type.parameterType(0)).changeParameterType(1, type.parameterType(1)) + ); } else { // case 3: check both receiver and argument MethodHandle binaryTest = CHECK_BOTH.bindTo(clazz0).bindTo(clazz1); - test = binaryTest.asType(binaryTest.type() - .changeParameterType(0, type.parameterType(0)) - .changeParameterType(1, type.parameterType(1))); + test = binaryTest.asType( + binaryTest.type().changeParameterType(0, type.parameterType(0)).changeParameterType(1, type.parameterType(1)) + ); } } else { // unary operator MethodHandle receiverTest = CHECK_LHS.bindTo(args[0].getClass()); - test = receiverTest.asType(receiverTest.type() - .changeParameterType(0, type.parameterType(0))); + test = receiverTest.asType(receiverTest.type().changeParameterType(0, type.parameterType(0))); } MethodHandle guard = MethodHandles.guardWithTest(test, target, getTarget()); // very special cases, where even the receiver can be null (see JLS rules for string concat) // we wrap + with an NPE catcher, and use our generic method in that case. 
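Context for the hunk above (not taken from the diff; the helper names below are hypothetical): DefBootstrap combines the per-class test and the resolved target with MethodHandles.guardWithTest, and for the null-tolerant '+' (string concatenation) flavor wraps the result in MethodHandles.catchException so that a NullPointerException raised by the class check falls back to the generic method. A minimal, self-contained sketch of that shape:

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

// Illustrative sketch only; these helpers are hypothetical, not OpenSearch APIs.
public final class GuardSketch {
    static boolean sameClass(Class<?> expected, Object receiver) {
        return receiver.getClass() == expected;      // NPEs on a null receiver, like the real class check
    }
    static Object fastConcat(Object a, Object b) {   // stands in for the cached, type-specialized target
        return (String) a + b;
    }
    static Object genericAdd(Object a, Object b) {   // stands in for the generic, null-tolerant add
        return String.valueOf(a) + b;
    }
    static Object onNpe(NullPointerException e, Object a, Object b) {
        return genericAdd(a, b);                     // fall back when the guard itself NPEs
    }

    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup l = MethodHandles.lookup();
        MethodType binary = MethodType.methodType(Object.class, Object.class, Object.class);
        MethodHandle test = l.findStatic(GuardSketch.class, "sameClass",
                MethodType.methodType(boolean.class, Class.class, Object.class)).bindTo(String.class);
        MethodHandle target = l.findStatic(GuardSketch.class, "fastConcat", binary);
        MethodHandle fallback = l.findStatic(GuardSketch.class, "genericAdd", binary);
        MethodHandle guarded = MethodHandles.guardWithTest(test, target, fallback);
        MethodHandle handler = l.findStatic(GuardSketch.class, "onNpe",
                MethodType.methodType(Object.class, NullPointerException.class, Object.class, Object.class));
        MethodHandle safe = MethodHandles.catchException(guarded, NullPointerException.class, handler);
        System.out.println(safe.invoke("a", "b"));   // guard passes -> fastConcat -> "ab"
        System.out.println(safe.invoke(null, "b"));  // guard NPEs -> generic fallback -> "nullb"
    }
}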
if (flavor == BINARY_OPERATOR && (flags & OPERATOR_ALLOWS_NULL) != 0) { - MethodHandle handler = MethodHandles.dropArguments(lookupGeneric().asType(type()), - 0, - NullPointerException.class); + MethodHandle handler = MethodHandles.dropArguments(lookupGeneric().asType(type()), 0, NullPointerException.class); guard = MethodHandles.catchException(guard, NullPointerException.class, handler); } @@ -424,14 +438,26 @@ public final class DefBootstrap { static { final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); try { - CHECK_LHS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkLHS", - MethodType.methodType(boolean.class, Class.class, Object.class)); - CHECK_RHS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkRHS", - MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); - CHECK_BOTH = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkBoth", - MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); - FALLBACK = methodHandlesLookup.findVirtual(methodHandlesLookup.lookupClass(), "fallback", - MethodType.methodType(Object.class, Object[].class)); + CHECK_LHS = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "checkLHS", + MethodType.methodType(boolean.class, Class.class, Object.class) + ); + CHECK_RHS = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "checkRHS", + MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class) + ); + CHECK_BOTH = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "checkBoth", + MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class) + ); + FALLBACK = methodHandlesLookup.findVirtual( + methodHandlesLookup.lookupClass(), + "fallback", + MethodType.methodType(Object.class, Object[].class) + ); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } @@ -452,10 +478,19 @@ public final class DefBootstrap { * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ @SuppressWarnings("unchecked") - public static CallSite bootstrap(PainlessLookup painlessLookup, FunctionTable functions, Map constants, - MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object... args) { + public static CallSite bootstrap( + PainlessLookup painlessLookup, + FunctionTable functions, + Map constants, + MethodHandles.Lookup methodHandlesLookup, + String name, + MethodType type, + int initialDepth, + int flavor, + Object... args + ) { // validate arguments - switch(flavor) { + switch (flavor) { // "function-call" like things get a polymorphic cache case METHOD_CALL: if (args.length == 0) { @@ -502,7 +537,7 @@ public final class DefBootstrap { if (args[0] instanceof Integer == false) { throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]); } - int flags = (int)args[0]; + int flags = (int) args[0]; if ((flags & OPERATOR_ALLOWS_NULL) != 0 && flavor != BINARY_OPERATOR) { // we just don't need it anywhere else. 
throw new BootstrapMethodError("This parameter is only supported for BINARY_OPERATORs"); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/DefMath.java b/modules/lang-painless/src/main/java/org/opensearch/painless/DefMath.java index cb5f2ec0893..0d502ab505e 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/DefMath.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/DefMath.java @@ -77,19 +77,18 @@ public class DefMath { private static Object not(Object unary) { if (unary instanceof Long) { - return ~(Long)unary; + return ~(Long) unary; } else if (unary instanceof Integer) { - return ~(Integer)unary; + return ~(Integer) unary; } else if (unary instanceof Short) { - return ~(Short)unary; + return ~(Short) unary; } else if (unary instanceof Character) { - return ~(Character)unary; + return ~(Character) unary; } else if (unary instanceof Byte) { - return ~(Byte)unary; + return ~(Byte) unary; } - throw new ClassCastException("Cannot apply [~] operation to type " + - "[" + unary.getClass().getCanonicalName() + "]."); + throw new ClassCastException("Cannot apply [~] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } // unary negation and plus: applicable to all numeric types @@ -116,23 +115,22 @@ public class DefMath { private static Object neg(final Object unary) { if (unary instanceof Double) { - return -(double)unary; + return -(double) unary; } else if (unary instanceof Long) { - return -(long)unary; + return -(long) unary; } else if (unary instanceof Integer) { - return -(int)unary; + return -(int) unary; } else if (unary instanceof Float) { - return -(float)unary; + return -(float) unary; } else if (unary instanceof Short) { - return -(short)unary; + return -(short) unary; } else if (unary instanceof Character) { - return -(char)unary; + return -(char) unary; } else if (unary instanceof Byte) { - return -(byte)unary; + return -(byte) unary; } - throw new ClassCastException("Cannot apply [-] operation to type " + - "[" + unary.getClass().getCanonicalName() + "]."); + throw new ClassCastException("Cannot apply [-] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } private static int plus(int v) { @@ -157,23 +155,22 @@ public class DefMath { private static Object plus(final Object unary) { if (unary instanceof Double) { - return +(double)unary; + return +(double) unary; } else if (unary instanceof Long) { - return +(long)unary; + return +(long) unary; } else if (unary instanceof Integer) { - return +(int)unary; + return +(int) unary; } else if (unary instanceof Float) { - return +(float)unary; + return +(float) unary; } else if (unary instanceof Short) { - return +(short)unary; + return +(short) unary; } else if (unary instanceof Character) { - return +(char)unary; + return +(char) unary; } else if (unary instanceof Byte) { - return +(byte)unary; + return +(byte) unary; } - throw new ClassCastException("Cannot apply [+] operation to type " + - "[" + unary.getClass().getCanonicalName() + "]."); + throw new ClassCastException("Cannot apply [+] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } // multiplication/division/remainder/subtraction: applicable to all integer types @@ -202,43 +199,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() * ((Number)right).doubleValue(); + return ((Number) left).doubleValue() * ((Number) 
right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() * ((Number)right).floatValue(); + return ((Number) left).floatValue() * ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() * ((Number)right).longValue(); + return ((Number) left).longValue() * ((Number) right).longValue(); } else { - return ((Number)left).intValue() * ((Number)right).intValue(); + return ((Number) left).intValue() * ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() * (char)right; + return ((Number) left).doubleValue() * (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() * (char)right; + return ((Number) left).longValue() * (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() * (char)right; + return ((Number) left).floatValue() * (char) right; } else { - return ((Number)left).intValue() * (char)right; + return ((Number) left).intValue() * (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left * ((Number)right).doubleValue(); + return (char) left * ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left * ((Number)right).longValue(); + return (char) left * ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left * ((Number)right).floatValue(); + return (char) left * ((Number) right).floatValue(); } else { - return (char)left * ((Number)right).intValue(); + return (char) left * ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left * (char)right; + return (char) left * (char) right; } } - throw new ClassCastException("Cannot apply [*] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [*] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." 
+ ); } private static int div(int a, int b) { @@ -265,43 +268,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() / ((Number)right).doubleValue(); + return ((Number) left).doubleValue() / ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() / ((Number)right).floatValue(); + return ((Number) left).floatValue() / ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() / ((Number)right).longValue(); + return ((Number) left).longValue() / ((Number) right).longValue(); } else { - return ((Number)left).intValue() / ((Number)right).intValue(); + return ((Number) left).intValue() / ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() / (char)right; + return ((Number) left).doubleValue() / (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() / (char)right; + return ((Number) left).longValue() / (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() / (char)right; + return ((Number) left).floatValue() / (char) right; } else { - return ((Number)left).intValue() / (char)right; + return ((Number) left).intValue() / (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left / ((Number)right).doubleValue(); + return (char) left / ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left / ((Number)right).longValue(); + return (char) left / ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left / ((Number)right).floatValue(); + return (char) left / ((Number) right).floatValue(); } else { - return (char)left / ((Number)right).intValue(); + return (char) left / ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left / (char)right; + return (char) left / (char) right; } } - throw new ClassCastException("Cannot apply [/] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [/] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." 
+ ); } private static int rem(int a, int b) { @@ -328,43 +337,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() % ((Number)right).doubleValue(); + return ((Number) left).doubleValue() % ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() % ((Number)right).floatValue(); + return ((Number) left).floatValue() % ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() % ((Number)right).longValue(); + return ((Number) left).longValue() % ((Number) right).longValue(); } else { - return ((Number)left).intValue() % ((Number)right).intValue(); + return ((Number) left).intValue() % ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() % (char)right; + return ((Number) left).doubleValue() % (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() % (char)right; + return ((Number) left).longValue() % (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() % (char)right; + return ((Number) left).floatValue() % (char) right; } else { - return ((Number)left).intValue() % (char)right; + return ((Number) left).intValue() % (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left % ((Number)right).doubleValue(); + return (char) left % ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left % ((Number)right).longValue(); + return (char) left % ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left % ((Number)right).floatValue(); + return (char) left % ((Number) right).floatValue(); } else { - return (char)left % ((Number)right).intValue(); + return (char) left % ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left % (char)right; + return (char) left % (char) right; } } - throw new ClassCastException("Cannot apply [%] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [%] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." + ); } // addition: applicable to all numeric types. 
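The mul, div and rem hunks above (and the add and sub hunks that follow) all reformat the same promotion ladder: when both def operands are numeric, DefMath widens the operation to double, then float, then long, and otherwise operates on int, with Character taking part as an int. A condensed sketch of that ladder, simplified to the Number/Character case (class and method names here are illustrative, not from the diff):

// Simplified sketch of DefMath's binary numeric promotion; null and non-numeric
// operands are omitted, and the names are illustrative.
public final class PromotionSketch {
    static Object mul(Object left, Object right) {
        Number l = widen(left);
        Number r = widen(right);
        if (l instanceof Double || r instanceof Double) return l.doubleValue() * r.doubleValue();
        if (l instanceof Float || r instanceof Float) return l.floatValue() * r.floatValue();
        if (l instanceof Long || r instanceof Long) return l.longValue() * r.longValue();
        return l.intValue() * r.intValue();
    }
    private static Number widen(Object o) {
        if (o instanceof Character) return (int) (char) o;   // char participates as int
        return (Number) o;   // anything else fails with a ClassCastException (DefMath throws its own, more descriptive one)
    }
    public static void main(String[] args) {
        System.out.println(mul(3, 2.5d));   // 7.5  (promoted to double)
        System.out.println(mul('a', 2));    // 194  (char 'a' == 97, promoted to int)
    }
}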
@@ -398,43 +413,49 @@ public class DefMath { } else if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() + ((Number)right).doubleValue(); + return ((Number) left).doubleValue() + ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() + ((Number)right).floatValue(); + return ((Number) left).floatValue() + ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() + ((Number)right).longValue(); + return ((Number) left).longValue() + ((Number) right).longValue(); } else { - return ((Number)left).intValue() + ((Number)right).intValue(); + return ((Number) left).intValue() + ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() + (char)right; + return ((Number) left).doubleValue() + (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() + (char)right; + return ((Number) left).longValue() + (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() + (char)right; + return ((Number) left).floatValue() + (char) right; } else { - return ((Number)left).intValue() + (char)right; + return ((Number) left).intValue() + (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left + ((Number)right).doubleValue(); + return (char) left + ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left + ((Number)right).longValue(); + return (char) left + ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left + ((Number)right).floatValue(); + return (char) left + ((Number) right).floatValue(); } else { - return (char)left + ((Number)right).intValue(); + return (char) left + ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left + (char)right; + return (char) left + (char) right; } } - throw new ClassCastException("Cannot apply [+] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [+] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." 
+ ); } private static int sub(int a, int b) { @@ -461,43 +482,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() - ((Number)right).doubleValue(); + return ((Number) left).doubleValue() - ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() - ((Number)right).floatValue(); + return ((Number) left).floatValue() - ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() - ((Number)right).longValue(); + return ((Number) left).longValue() - ((Number) right).longValue(); } else { - return ((Number)left).intValue() - ((Number)right).intValue(); + return ((Number) left).intValue() - ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() - (char)right; + return ((Number) left).doubleValue() - (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() - (char)right; + return ((Number) left).longValue() - (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() - (char)right; + return ((Number) left).floatValue() - (char) right; } else { - return ((Number)left).intValue() - (char)right; + return ((Number) left).intValue() - (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left - ((Number)right).doubleValue(); + return (char) left - ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left - ((Number)right).longValue(); + return (char) left - ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left - ((Number)right).floatValue(); + return (char) left - ((Number) right).floatValue(); } else { - return (char)left - ((Number)right).intValue(); + return (char) left - ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left - (char)right; + return (char) left - (char) right; } } - throw new ClassCastException("Cannot apply [-] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [-] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." + ); } // eq: applicable to any arbitrary type, including nulls for both arguments!!! 
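The eq hunk that follows applies the same promotion before comparing and only falls back to Object.equals when neither operand promotes to a number. A small sketch of the resulting semantics (null handling simplified; names are illustrative, not from the diff):

// Sketch of def '==' semantics for boxed operands; null handling is simplified
// relative to DefMath.eq, and the names here are illustrative.
public final class EqSketch {
    static boolean eq(Object left, Object right) {
        if (left == null || right == null) return left == right;
        Number l = asNumber(left);
        Number r = asNumber(right);
        if (l != null && r != null) {
            if (l instanceof Double || r instanceof Double) return l.doubleValue() == r.doubleValue();
            if (l instanceof Float || r instanceof Float) return l.floatValue() == r.floatValue();
            if (l instanceof Long || r instanceof Long) return l.longValue() == r.longValue();
            return l.intValue() == r.intValue();
        }
        return left.equals(right);
    }
    private static Number asNumber(Object o) {
        if (o instanceof Number) return (Number) o;
        if (o instanceof Character) return (int) (char) o;
        return null;
    }
    public static void main(String[] args) {
        System.out.println(eq(5, 5L));        // true: numeric promotion, unlike Integer.equals(Long)
        System.out.println(eq('a', 97));      // true: char compared as int
        System.out.println(eq("foo", "foo")); // true: falls back to equals()
    }
}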
@@ -526,50 +553,50 @@ public class DefMath { if (left != null && right != null) { if (left instanceof Double) { if (right instanceof Number) { - return (double)left == ((Number)right).doubleValue(); + return (double) left == ((Number) right).doubleValue(); } else if (right instanceof Character) { - return (double)left == (char)right; + return (double) left == (char) right; } } else if (right instanceof Double) { if (left instanceof Number) { - return ((Number)left).doubleValue() == (double)right; + return ((Number) left).doubleValue() == (double) right; } else if (left instanceof Character) { - return (char)left == ((Number)right).doubleValue(); + return (char) left == ((Number) right).doubleValue(); } } else if (left instanceof Float) { if (right instanceof Number) { - return (float)left == ((Number)right).floatValue(); + return (float) left == ((Number) right).floatValue(); } else if (right instanceof Character) { - return (float)left == (char)right; + return (float) left == (char) right; } } else if (right instanceof Float) { if (left instanceof Number) { - return ((Number)left).floatValue() == (float)right; + return ((Number) left).floatValue() == (float) right; } else if (left instanceof Character) { - return (char)left == ((Number)right).floatValue(); + return (char) left == ((Number) right).floatValue(); } } else if (left instanceof Long) { if (right instanceof Number) { - return (long)left == ((Number)right).longValue(); + return (long) left == ((Number) right).longValue(); } else if (right instanceof Character) { - return (long)left == (char)right; + return (long) left == (char) right; } } else if (right instanceof Long) { if (left instanceof Number) { - return ((Number)left).longValue() == (long)right; + return ((Number) left).longValue() == (long) right; } else if (left instanceof Character) { - return (char)left == ((Number)right).longValue(); + return (char) left == ((Number) right).longValue(); } } else if (left instanceof Number) { if (right instanceof Number) { - return ((Number)left).intValue() == ((Number)right).intValue(); + return ((Number) left).intValue() == ((Number) right).intValue(); } else if (right instanceof Character) { - return ((Number)left).intValue() == (char)right; + return ((Number) left).intValue() == (char) right; } } else if (right instanceof Number && left instanceof Character) { - return (char)left == ((Number)right).intValue(); + return (char) left == ((Number) right).intValue(); } else if (left instanceof Character && right instanceof Character) { - return (char)left == (char)right; + return (char) left == (char) right; } return left.equals(right); @@ -604,43 +631,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() < ((Number)right).doubleValue(); + return ((Number) left).doubleValue() < ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() < ((Number)right).floatValue(); + return ((Number) left).floatValue() < ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() < ((Number)right).longValue(); + return ((Number) left).longValue() < ((Number) right).longValue(); } else { - return ((Number)left).intValue() < ((Number)right).intValue(); + return ((Number) left).intValue() < ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) 
{ - return ((Number)left).doubleValue() < (char)right; + return ((Number) left).doubleValue() < (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() < (char)right; + return ((Number) left).longValue() < (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() < (char)right; + return ((Number) left).floatValue() < (char) right; } else { - return ((Number)left).intValue() < (char)right; + return ((Number) left).intValue() < (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left < ((Number)right).doubleValue(); + return (char) left < ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left < ((Number)right).longValue(); + return (char) left < ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left < ((Number)right).floatValue(); + return (char) left < ((Number) right).floatValue(); } else { - return (char)left < ((Number)right).intValue(); + return (char) left < ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left < (char)right; + return (char) left < (char) right; } } - throw new ClassCastException("Cannot apply [<] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [<] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." + ); } private static boolean lte(int a, int b) { @@ -667,43 +700,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() <= ((Number)right).doubleValue(); + return ((Number) left).doubleValue() <= ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() <= ((Number)right).floatValue(); + return ((Number) left).floatValue() <= ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() <= ((Number)right).longValue(); + return ((Number) left).longValue() <= ((Number) right).longValue(); } else { - return ((Number)left).intValue() <= ((Number)right).intValue(); + return ((Number) left).intValue() <= ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() <= (char)right; + return ((Number) left).doubleValue() <= (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() <= (char)right; + return ((Number) left).longValue() <= (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() <= (char)right; + return ((Number) left).floatValue() <= (char) right; } else { - return ((Number)left).intValue() <= (char)right; + return ((Number) left).intValue() <= (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left <= ((Number)right).doubleValue(); + return (char) left <= ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left <= ((Number)right).longValue(); + return (char) left <= ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left <= ((Number)right).floatValue(); + return (char) left <= ((Number) 
right).floatValue(); } else { - return (char)left <= ((Number)right).intValue(); + return (char) left <= ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left <= (char)right; + return (char) left <= (char) right; } } - throw new ClassCastException("Cannot apply [<=] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [<=] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." + ); } private static boolean gt(int a, int b) { @@ -730,43 +769,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() > ((Number)right).doubleValue(); + return ((Number) left).doubleValue() > ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() > ((Number)right).floatValue(); + return ((Number) left).floatValue() > ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() > ((Number)right).longValue(); + return ((Number) left).longValue() > ((Number) right).longValue(); } else { - return ((Number)left).intValue() > ((Number)right).intValue(); + return ((Number) left).intValue() > ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() > (char)right; + return ((Number) left).doubleValue() > (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() > (char)right; + return ((Number) left).longValue() > (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() > (char)right; + return ((Number) left).floatValue() > (char) right; } else { - return ((Number)left).intValue() > (char)right; + return ((Number) left).intValue() > (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left > ((Number)right).doubleValue(); + return (char) left > ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left > ((Number)right).longValue(); + return (char) left > ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left > ((Number)right).floatValue(); + return (char) left > ((Number) right).floatValue(); } else { - return (char)left > ((Number)right).intValue(); + return (char) left > ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left > (char)right; + return (char) left > (char) right; } } - throw new ClassCastException("Cannot apply [>] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [>] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." 
+ ); } private static boolean gte(int a, int b) { @@ -793,43 +838,49 @@ public class DefMath { if (left instanceof Number) { if (right instanceof Number) { if (left instanceof Double || right instanceof Double) { - return ((Number)left).doubleValue() >= ((Number)right).doubleValue(); + return ((Number) left).doubleValue() >= ((Number) right).doubleValue(); } else if (left instanceof Float || right instanceof Float) { - return ((Number)left).floatValue() >= ((Number)right).floatValue(); + return ((Number) left).floatValue() >= ((Number) right).floatValue(); } else if (left instanceof Long || right instanceof Long) { - return ((Number)left).longValue() >= ((Number)right).longValue(); + return ((Number) left).longValue() >= ((Number) right).longValue(); } else { - return ((Number)left).intValue() >= ((Number)right).intValue(); + return ((Number) left).intValue() >= ((Number) right).intValue(); } } else if (right instanceof Character) { if (left instanceof Double) { - return ((Number)left).doubleValue() >= (char)right; + return ((Number) left).doubleValue() >= (char) right; } else if (left instanceof Long) { - return ((Number)left).longValue() >= (char)right; + return ((Number) left).longValue() >= (char) right; } else if (left instanceof Float) { - return ((Number)left).floatValue() >= (char)right; + return ((Number) left).floatValue() >= (char) right; } else { - return ((Number)left).intValue() >= (char)right; + return ((Number) left).intValue() >= (char) right; } } } else if (left instanceof Character) { if (right instanceof Number) { if (right instanceof Double) { - return (char)left >= ((Number)right).doubleValue(); + return (char) left >= ((Number) right).doubleValue(); } else if (right instanceof Long) { - return (char)left >= ((Number)right).longValue(); + return (char) left >= ((Number) right).longValue(); } else if (right instanceof Float) { - return (char)left >= ((Number)right).floatValue(); + return (char) left >= ((Number) right).floatValue(); } else { - return (char)left >= ((Number)right).intValue(); + return (char) left >= ((Number) right).intValue(); } } else if (right instanceof Character) { - return (char)left >= (char)right; + return (char) left >= (char) right; } } - throw new ClassCastException("Cannot apply [>] operation to types " + - "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + throw new ClassCastException( + "Cannot apply [>] operation to types " + + "[" + + left.getClass().getCanonicalName() + + "] and [" + + right.getClass().getCanonicalName() + + "]." 
+ ); } // helper methods to convert an integral according to numeric promotion @@ -837,11 +888,11 @@ public class DefMath { private static long longIntegralValue(Object o) { if (o instanceof Long) { - return (long)o; + return (long) o; } else if (o instanceof Integer || o instanceof Short || o instanceof Byte) { - return ((Number)o).longValue(); + return ((Number) o).longValue(); } else if (o instanceof Character) { - return (char)o; + return (char) o; } else { throw new ClassCastException("Cannot convert [" + o.getClass().getCanonicalName() + "] to an integral value."); } @@ -849,9 +900,9 @@ public class DefMath { private static int intIntegralValue(Object o) { if (o instanceof Integer || o instanceof Short || o instanceof Byte) { - return ((Number)o).intValue(); + return ((Number) o).intValue(); } else if (o instanceof Character) { - return (char)o; + return (char) o; } else { throw new ClassCastException("Cannot convert [" + o.getClass().getCanonicalName() + "] to an integral value."); } @@ -881,7 +932,7 @@ public class DefMath { private static Object and(Object left, Object right) { if (left instanceof Boolean && right instanceof Boolean) { - return (boolean)left & (boolean)right; + return (boolean) left & (boolean) right; } else if (left instanceof Long || right instanceof Long) { return longIntegralValue(left) & longIntegralValue(right); } else { @@ -911,7 +962,7 @@ public class DefMath { private static Object xor(Object left, Object right) { if (left instanceof Boolean && right instanceof Boolean) { - return (boolean)left ^ (boolean)right; + return (boolean) left ^ (boolean) right; } else if (left instanceof Long || right instanceof Long) { return longIntegralValue(left) ^ longIntegralValue(right); } else { @@ -941,7 +992,7 @@ public class DefMath { private static Object or(Object left, Object right) { if (left instanceof Boolean && right instanceof Boolean) { - return (boolean)left | (boolean)right; + return (boolean) left | (boolean) right; } else if (left instanceof Long || right instanceof Long) { return longIntegralValue(left) | longIntegralValue(right); } else { @@ -974,7 +1025,7 @@ public class DefMath { public static Object lsh(Object left, long right) { if (left instanceof Long) { - return (long)(left) << right; + return (long) (left) << right; } else { return intIntegralValue(left) << right; } @@ -1002,7 +1053,7 @@ public class DefMath { public static Object rsh(Object left, long right) { if (left instanceof Long) { - return (long)left >> right; + return (long) left >> right; } else { return intIntegralValue(left) >> right; } @@ -1030,7 +1081,7 @@ public class DefMath { public static Object ush(Object left, long right) { if (left instanceof Long) { - return (long)(left) >>> right; + return (long) (left) >>> right; } else { return intIntegralValue(left) >>> right; } @@ -1084,35 +1135,35 @@ public class DefMath { private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); - private static final Map,Map> TYPE_OP_MAPPING = Collections.unmodifiableMap( + private static final Map, Map> TYPE_OP_MAPPING = Collections.unmodifiableMap( Stream.of(boolean.class, int.class, long.class, float.class, double.class, Object.class) .collect(Collectors.toMap(Function.identity(), type -> { try { - Map map = new HashMap<>(); + Map map = new HashMap<>(); MethodType unary = MethodType.methodType(type, type); MethodType binary = MethodType.methodType(type, type, type); MethodType comparison = MethodType.methodType(boolean.class, type, type); MethodType shift = 
MethodType.methodType(type, type, long.class); Class clazz = PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(); - map.put("not", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "not", unary)); - map.put("neg", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "neg", unary)); - map.put("plus", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "plus", unary)); - map.put("mul", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "mul", binary)); - map.put("div", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "div", binary)); - map.put("rem", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rem", binary)); - map.put("add", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "add", binary)); - map.put("sub", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "sub", binary)); - map.put("and", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "and", binary)); - map.put("or", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "or", binary)); - map.put("xor", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "xor", binary)); - map.put("eq", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "eq", comparison)); - map.put("lt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lt", comparison)); - map.put("lte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lte", comparison)); - map.put("gt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gt", comparison)); - map.put("gte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gte", comparison)); - map.put("lsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lsh", shift)); - map.put("rsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rsh", shift)); - map.put("ush", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "ush", shift)); + map.put("not", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "not", unary)); + map.put("neg", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "neg", unary)); + map.put("plus", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "plus", unary)); + map.put("mul", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "mul", binary)); + map.put("div", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "div", binary)); + map.put("rem", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rem", binary)); + map.put("add", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "add", binary)); + map.put("sub", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "sub", binary)); + map.put("and", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "and", binary)); + map.put("or", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "or", binary)); + map.put("xor", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "xor", binary)); + map.put("eq", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "eq", comparison)); + map.put("lt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lt", comparison)); + map.put("lte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lte", comparison)); + map.put("gt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gt", comparison)); + map.put("gte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gte", comparison)); + map.put("lsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lsh", shift)); + map.put("rsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rsh", shift)); + map.put("ush", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "ush", shift)); return map; } catch (ReflectiveOperationException e) { throw new AssertionError(e); @@ -1189,9 +1240,9 @@ public class DefMath { /** Slowly returns a Number for o. 
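
The hunk above only reflows DefMath's per-type table of operation MethodHandles. As a minimal, self-contained sketch of the same technique under assumed names (a map of handles resolved once via MethodHandles.lookup().findStatic in a static initializer and reused at call sites), not code from this patch:

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.HashMap;
import java.util.Map;

public class OpTableSketch {
    // two overloads standing in for DefMath's per-type static helpers
    static int add(int a, int b) { return a + b; }
    static long add(long a, long b) { return a + b; }

    // type -> handle, resolved once up front, in the spirit of TYPE_OP_MAPPING above
    private static final Map<Class<?>, MethodHandle> ADD = new HashMap<>();

    static {
        try {
            MethodHandles.Lookup lookup = MethodHandles.lookup();
            ADD.put(int.class, lookup.findStatic(OpTableSketch.class, "add",
                MethodType.methodType(int.class, int.class, int.class)));
            ADD.put(long.class, lookup.findStatic(OpTableSketch.class, "add",
                MethodType.methodType(long.class, long.class, long.class)));
        } catch (ReflectiveOperationException e) {
            throw new AssertionError(e); // our own methods always resolve
        }
    }

    public static void main(String[] args) throws Throwable {
        MethodHandle h = ADD.get(long.class);
        System.out.println((long) h.invokeExact(2L, 40L)); // prints 42
    }
}
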
Just for supporting dynamicCast */ static Number getNumber(Object o) { if (o instanceof Number) { - return (Number)o; + return (Number) o; } else if (o instanceof Character) { - return Integer.valueOf((char)o); + return Integer.valueOf((char) o); } else { throw new ClassCastException("Cannot convert [" + o.getClass() + "] to a Number"); } @@ -1202,12 +1253,16 @@ public class DefMath { static { final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); try { - DYNAMIC_CAST = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), - "dynamicCast", - MethodType.methodType(Object.class, Class.class, Object.class)); - DYNAMIC_RECEIVER_CAST = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), - "dynamicReceiverCast", - MethodType.methodType(Object.class, Object.class, Object.class)); + DYNAMIC_CAST = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "dynamicCast", + MethodType.methodType(Object.class, Class.class, Object.class) + ); + DYNAMIC_RECEIVER_CAST = methodHandlesLookup.findStatic( + methodHandlesLookup.lookupClass(), + "dynamicReceiverCast", + MethodType.methodType(Object.class, Object.class, Object.class) + ); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } @@ -1216,9 +1271,9 @@ public class DefMath { /** Looks up generic method, with a dynamic cast to the receiver's type. (compound assignment) */ public static MethodHandle dynamicCast(MethodHandle target) { // adapt dynamic receiver cast to the generic method - MethodHandle cast = DYNAMIC_RECEIVER_CAST.asType(MethodType.methodType(target.type().returnType(), - target.type().returnType(), - target.type().parameterType(0))); + MethodHandle cast = DYNAMIC_RECEIVER_CAST.asType( + MethodType.methodType(target.type().returnType(), target.type().returnType(), target.type().parameterType(0)) + ); // drop the RHS parameter cast = MethodHandles.dropArguments(cast, 2, target.type().parameterType(1)); // combine: f(x,y) -> g(f(x,y), x, y); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java index 3c0c16f16f6..097960dfbe6 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/FunctionRef.java @@ -69,8 +69,16 @@ public class FunctionRef { * @param numberOfCaptures number of captured arguments * @param constants constants used for injection when necessary */ - public static FunctionRef create(PainlessLookup painlessLookup, FunctionTable functionTable, Location location, - Class targetClass, String typeName, String methodName, int numberOfCaptures, Map constants) { + public static FunctionRef create( + PainlessLookup painlessLookup, + FunctionTable functionTable, + Location location, + Class targetClass, + String typeName, + String methodName, + int numberOfCaptures, + Map constants + ) { Objects.requireNonNull(painlessLookup); Objects.requireNonNull(targetClass); @@ -84,8 +92,16 @@ public class FunctionRef { interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass); if (interfaceMethod == null) { - throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " + - "to a non-functional interface [" + targetClassName + "]"); + throw new IllegalArgumentException( + "cannot convert function reference [" + + typeName + + "::" + + methodName + + "] " + + "to a non-functional 
interface [" + + targetClassName + + "]" + ); } String interfaceMethodName = interfaceMethod.javaMethod.getName(); @@ -113,9 +129,19 @@ public class FunctionRef { LocalFunction localFunction = functionTable.getFunction(localFunctionKey); if (localFunction == null) { - throw new IllegalArgumentException("function reference [this::" + localFunctionKey + "] " + - "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + - "not found" + (localFunctionKey.contains("$") ? " due to an incorrect number of arguments" : "") + throw new IllegalArgumentException( + "function reference [this::" + + localFunctionKey + + "] " + + "matching [" + + targetClassName + + ", " + + interfaceMethodName + + "/" + + interfaceTypeParametersSize + + "] " + + "not found" + + (localFunctionKey.contains("$") ? " due to an incorrect number of arguments" : "") ); } @@ -137,9 +163,21 @@ public class FunctionRef { PainlessConstructor painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize); if (painlessConstructor == null) { - throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " + - "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + - "not found"); + throw new IllegalArgumentException( + "function reference [" + + typeName + + "::new/" + + interfaceTypeParametersSize + + "] " + + "matching [" + + targetClassName + + ", " + + interfaceMethodName + + "/" + + interfaceTypeParametersSize + + "] " + + "not found" + ); } delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName(); @@ -158,18 +196,40 @@ public class FunctionRef { } boolean captured = numberOfCaptures == 1; - PainlessMethod painlessMethod = - painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize); + PainlessMethod painlessMethod = painlessLookup.lookupPainlessMethod( + typeName, + true, + methodName, + interfaceTypeParametersSize + ); if (painlessMethod == null) { - painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName, - captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1); + painlessMethod = painlessLookup.lookupPainlessMethod( + typeName, + false, + methodName, + captured ? 
interfaceTypeParametersSize : interfaceTypeParametersSize - 1 + ); if (painlessMethod == null) { throw new IllegalArgumentException( - "function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " + - "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + - "not found"); + "function reference " + + "[" + + typeName + + "::" + + methodName + + "/" + + interfaceTypeParametersSize + + "] " + + "matching [" + + targetClassName + + ", " + + interfaceMethodName + + "/" + + interfaceTypeParametersSize + + "] " + + "not found" + ); } } else if (captured) { throw new IllegalStateException("internal error"); @@ -213,14 +273,23 @@ public class FunctionRef { } } - MethodType factoryMethodType = MethodType.methodType(targetClass, - delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount())); + MethodType factoryMethodType = MethodType.methodType( + targetClass, + delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount()) + ); delegateMethodType = delegateMethodType.dropParameterTypes(0, numberOfCaptures); - return new FunctionRef(interfaceMethodName, interfaceMethodType, - delegateClassName, isDelegateInterface, isDelegateAugmented, - delegateInvokeType, delegateMethodName, delegateMethodType, delegateInjections, - factoryMethodType + return new FunctionRef( + interfaceMethodName, + interfaceMethodType, + delegateClassName, + isDelegateInterface, + isDelegateAugmented, + delegateInvokeType, + delegateMethodName, + delegateMethodType, + delegateInjections, + factoryMethodType ); } catch (IllegalArgumentException iae) { if (location != null) { @@ -253,10 +322,17 @@ public class FunctionRef { public final MethodType factoryMethodType; private FunctionRef( - String interfaceMethodName, MethodType interfaceMethodType, - String delegateClassName, boolean isDelegateInterface, boolean isDelegateAugmented, - int delegateInvokeType, String delegateMethodName, MethodType delegateMethodType, Object[] delegateInjections, - MethodType factoryMethodType) { + String interfaceMethodName, + MethodType interfaceMethodType, + String delegateClassName, + boolean isDelegateInterface, + boolean isDelegateAugmented, + int delegateInvokeType, + String delegateMethodName, + MethodType delegateMethodType, + Object[] delegateInjections, + MethodType factoryMethodType + ) { this.interfaceMethodName = interfaceMethodName; this.interfaceMethodType = interfaceMethodType; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Globals.java index 6661b17c8f3..594133a3e9c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Globals.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Globals.java @@ -40,7 +40,7 @@ import java.util.Map; * Program-wide globals (initializers, synthetic methods, etc) */ public class Globals { - private final Map constantInitializers = new HashMap<>(); + private final Map constantInitializers = new HashMap<>(); private final BitSet statements; /** Create a new Globals from the set of statement boundaries */ @@ -56,7 +56,7 @@ public class Globals { } /** Returns the current initializers */ - public Map getConstantInitializers() { + public Map getConstantInitializers() { return constantInitializers; } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java 
b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java index c72d72bef85..d0af4651d2d 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java @@ -213,19 +213,19 @@ public final class LambdaBootstrap { * @throws LambdaConversionException Thrown when an illegal type conversion occurs at link time */ public static CallSite lambdaBootstrap( - Lookup lookup, - String interfaceMethodName, - MethodType factoryMethodType, - MethodType interfaceMethodType, - String delegateClassName, - int delegateInvokeType, - String delegateMethodName, - MethodType delegateMethodType, - int isDelegateInterface, - int isDelegateAugmented, - Object... injections) - throws LambdaConversionException { - Compiler.Loader loader = (Compiler.Loader)lookup.lookupClass().getClassLoader(); + Lookup lookup, + String interfaceMethodName, + MethodType factoryMethodType, + MethodType interfaceMethodType, + String delegateClassName, + int delegateInvokeType, + String delegateMethodName, + MethodType delegateMethodType, + int isDelegateInterface, + int isDelegateAugmented, + Object... injections + ) throws LambdaConversionException { + Compiler.Loader loader = (Compiler.Loader) lookup.lookupClass().getClassLoader(); String lambdaClassName = Type.getInternalName(lookup.lookupClass()) + "$$Lambda" + loader.newLambdaIdentifier(); Type lambdaClassType = Type.getObjectType(lambdaClassName); Type delegateClassType = Type.getObjectType(delegateClassName.replace('.', '/')); @@ -246,9 +246,21 @@ public final class LambdaBootstrap { delegateInvokeType = H_INVOKESTATIC; } - generateInterfaceMethod(cw, factoryMethodType, lambdaClassType, interfaceMethodName, - interfaceMethodType, delegateClassType, delegateInvokeType, - delegateMethodName, delegateMethodType, isDelegateInterface == 1, isDelegateAugmented == 1, captures, injections); + generateInterfaceMethod( + cw, + factoryMethodType, + lambdaClassType, + interfaceMethodName, + interfaceMethodType, + delegateClassType, + delegateInvokeType, + delegateMethodName, + delegateMethodType, + isDelegateInterface == 1, + isDelegateAugmented == 1, + captures, + injections + ); endLambdaClass(cw); @@ -264,13 +276,12 @@ public final class LambdaBootstrap { * Validates some conversions at link time. Currently, only ensures that the lambda method * with a return value cannot delegate to a delegate method with no return type. 
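
The lambdaBootstrap signature change above is formatting only. For orientation, here is a sketch of the analogous JDK facility, LambdaMetafactory, which does the same kind of link-time conversion that this class builds by hand with ASM; the Integer::parseInt delegate and the class name are illustrative, not taken from the patch:

import java.lang.invoke.CallSite;
import java.lang.invoke.LambdaMetafactory;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.function.Function;

public class MetafactorySketch {
    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        // delegate method: static Integer.parseInt(String) -> int
        MethodHandle parseInt = lookup.findStatic(Integer.class, "parseInt",
            MethodType.methodType(int.class, String.class));
        CallSite site = LambdaMetafactory.metafactory(
            lookup,
            "apply",                                            // interface method name
            MethodType.methodType(Function.class),              // factory type (no captures)
            MethodType.methodType(Object.class, Object.class),  // erased SAM type
            parseInt,                                           // delegate handle
            MethodType.methodType(Integer.class, String.class)  // instantiated SAM type
        );
        @SuppressWarnings("unchecked")
        Function<String, Integer> f = (Function<String, Integer>) site.getTarget().invoke();
        System.out.println(f.apply("41") + 1); // prints 42
    }
}
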
*/ - private static void validateTypes(MethodType interfaceMethodType, MethodType delegateMethodType) - throws LambdaConversionException { + private static void validateTypes(MethodType interfaceMethodType, MethodType delegateMethodType) throws LambdaConversionException { - if (interfaceMethodType.returnType() != void.class && - delegateMethodType.returnType() == void.class) { - throw new LambdaConversionException("lambda expects return type [" - + interfaceMethodType.returnType() + "], but found return type [void]"); + if (interfaceMethodType.returnType() != void.class && delegateMethodType.returnType() == void.class) { + throw new LambdaConversionException( + "lambda expects return type [" + interfaceMethodType.returnType() + "], but found return type [void]" + ); } } @@ -282,8 +293,7 @@ public final class LambdaBootstrap { int modifiers = ACC_PUBLIC | ACC_SUPER | ACC_FINAL | ACC_SYNTHETIC; ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS); - cw.visit(CLASS_VERSION, - modifiers, lambdaClassName, null, baseClass, new String[] { Type.getInternalName(lambdaInterface) }); + cw.visit(CLASS_VERSION, modifiers, lambdaClassName, null, baseClass, new String[] { Type.getInternalName(lambdaInterface) }); return cw; } @@ -299,12 +309,10 @@ public final class LambdaBootstrap { Capture[] captures = new Capture[captureTotal]; for (int captureCount = 0; captureCount < captureTotal; ++captureCount) { - captures[captureCount] = - new Capture(captureCount, factoryMethodType.parameterType(captureCount)); + captures[captureCount] = new Capture(captureCount, factoryMethodType.parameterType(captureCount)); int modifiers = ACC_PRIVATE | ACC_FINAL; - FieldVisitor fv = cw.visitField( - modifiers, captures[captureCount].name, captures[captureCount].desc, null, null); + FieldVisitor fv = cw.visitField(modifiers, captures[captureCount].name, captures[captureCount].desc, null, null); fv.visitEnd(); } @@ -316,21 +324,19 @@ public final class LambdaBootstrap { * arguments if any and store them in their respective * member fields. */ - private static void generateLambdaConstructor( - ClassWriter cw, - Type lambdaClassType, - MethodType factoryMethodType, - Capture[] captures) { + private static void generateLambdaConstructor(ClassWriter cw, Type lambdaClassType, MethodType factoryMethodType, Capture[] captures) { String conDesc = factoryMethodType.changeReturnType(void.class).toMethodDescriptorString(); Method conMeth = new Method(CTOR_METHOD_NAME, conDesc); Type baseConType = Type.getType(Object.class); - Method baseConMeth = new Method(CTOR_METHOD_NAME, - MethodType.methodType(void.class).toMethodDescriptorString()); + Method baseConMeth = new Method(CTOR_METHOD_NAME, MethodType.methodType(void.class).toMethodDescriptorString()); int modifiers = (captures.length > 0) ? 
ACC_PRIVATE : ACC_PUBLIC; - GeneratorAdapter constructor = new GeneratorAdapter(modifiers, conMeth, - cw.visitMethod(modifiers, CTOR_METHOD_NAME, conDesc, null, null)); + GeneratorAdapter constructor = new GeneratorAdapter( + modifiers, + conMeth, + cw.visitMethod(modifiers, CTOR_METHOD_NAME, conDesc, null, null) + ); constructor.visitCode(); constructor.loadThis(); constructor.invokeConstructor(baseConType, baseConMeth); @@ -338,8 +344,7 @@ public final class LambdaBootstrap { for (int captureCount = 0; captureCount < captures.length; ++captureCount) { constructor.loadThis(); constructor.loadArg(captureCount); - constructor.putField( - lambdaClassType, captures[captureCount].name, captures[captureCount].type); + constructor.putField(lambdaClassType, captures[captureCount].name, captures[captureCount].type); } constructor.returnValue(); @@ -360,15 +365,22 @@ public final class LambdaBootstrap { /** * Generates a factory method to delegate to constructors. */ - private static void generateStaticCtorDelegator(ClassWriter cw, int access, String delegatorMethodName, - Type delegateClassType, MethodType delegateMethodType) { + private static void generateStaticCtorDelegator( + ClassWriter cw, + int access, + String delegatorMethodName, + Type delegateClassType, + MethodType delegateMethodType + ) { Method wrapperMethod = new Method(delegatorMethodName, delegateMethodType.toMethodDescriptorString()); - Method constructorMethod = - new Method(CTOR_METHOD_NAME, delegateMethodType.changeReturnType(void.class).toMethodDescriptorString()); + Method constructorMethod = new Method(CTOR_METHOD_NAME, delegateMethodType.changeReturnType(void.class).toMethodDescriptorString()); int modifiers = access | ACC_STATIC; - GeneratorAdapter factory = new GeneratorAdapter(modifiers, wrapperMethod, - cw.visitMethod(modifiers, delegatorMethodName, delegateMethodType.toMethodDescriptorString(), null, null)); + GeneratorAdapter factory = new GeneratorAdapter( + modifiers, + wrapperMethod, + cw.visitMethod(modifiers, delegatorMethodName, delegateMethodType.toMethodDescriptorString(), null, null) + ); factory.visitCode(); factory.newInstance(delegateClassType); factory.dup(); @@ -383,34 +395,36 @@ public final class LambdaBootstrap { * with {@code INVOKEDYNAMIC} using the {@link #delegateBootstrap} type converter. */ private static void generateInterfaceMethod( - ClassWriter cw, - MethodType factoryMethodType, - Type lambdaClassType, - String interfaceMethodName, - MethodType interfaceMethodType, - Type delegateClassType, - int delegateInvokeType, - String delegateMethodName, - MethodType delegateMethodType, - boolean isDelegateInterface, - boolean isDelegateAugmented, - Capture[] captures, - Object... injections) - throws LambdaConversionException { + ClassWriter cw, + MethodType factoryMethodType, + Type lambdaClassType, + String interfaceMethodName, + MethodType interfaceMethodType, + Type delegateClassType, + int delegateInvokeType, + String delegateMethodName, + MethodType delegateMethodType, + boolean isDelegateInterface, + boolean isDelegateAugmented, + Capture[] captures, + Object... 
injections + ) throws LambdaConversionException { String lamDesc = interfaceMethodType.toMethodDescriptorString(); Method lamMeth = new Method(lambdaClassType.getInternalName(), lamDesc); int modifiers = ACC_PUBLIC; - GeneratorAdapter iface = new GeneratorAdapter(modifiers, lamMeth, - cw.visitMethod(modifiers, interfaceMethodName, lamDesc, null, null)); + GeneratorAdapter iface = new GeneratorAdapter( + modifiers, + lamMeth, + cw.visitMethod(modifiers, interfaceMethodName, lamDesc, null, null) + ); iface.visitCode(); // Loads any captured variables onto the stack. for (int captureCount = 0; captureCount < captures.length; ++captureCount) { iface.loadThis(); - iface.getField( - lambdaClassType, captures[captureCount].name, captures[captureCount].type); + iface.getField(lambdaClassType, captures[captureCount].name, captures[captureCount].type); } // Loads any passed in arguments onto the stack. @@ -418,18 +432,15 @@ public final class LambdaBootstrap { // Handles the case for a lambda function or a static reference method. // interfaceMethodType and delegateMethodType both have the captured types - // inserted into their type signatures. This later allows the delegate + // inserted into their type signatures. This later allows the delegate // method to be invoked dynamically and have the interface method types // appropriately converted to the delegate method types. // Example: Integer::parseInt // Example: something.each(x -> x + 1) if (delegateInvokeType == H_INVOKESTATIC) { - interfaceMethodType = - interfaceMethodType.insertParameterTypes(0, factoryMethodType.parameterArray()); - delegateMethodType = - delegateMethodType.insertParameterTypes(0, factoryMethodType.parameterArray()); - } else if (delegateInvokeType == H_INVOKEVIRTUAL || - delegateInvokeType == H_INVOKEINTERFACE) { + interfaceMethodType = interfaceMethodType.insertParameterTypes(0, factoryMethodType.parameterArray()); + delegateMethodType = delegateMethodType.insertParameterTypes(0, factoryMethodType.parameterArray()); + } else if (delegateInvokeType == H_INVOKEVIRTUAL || delegateInvokeType == H_INVOKEINTERFACE) { // Handles the case for a virtual or interface reference method with no captures. // delegateMethodType drops the 'this' parameter because it will be re-inserted // when the method handle for the dynamically invoked delegate method is created. @@ -438,29 +449,30 @@ public final class LambdaBootstrap { Class clazz = delegateMethodType.parameterType(0); delegateClassType = Type.getType(clazz); delegateMethodType = delegateMethodType.dropParameterTypes(0, 1); - // Handles the case for a virtual or interface reference method with 'this' - // captured. interfaceMethodType inserts the 'this' type into its - // method signature. This later allows the delegate - // method to be invoked dynamically and have the interface method types - // appropriately converted to the delegate method types. - // Example: something::toString + // Handles the case for a virtual or interface reference method with 'this' + // captured. interfaceMethodType inserts the 'this' type into its + // method signature. This later allows the delegate + // method to be invoked dynamically and have the interface method types + // appropriately converted to the delegate method types. 
+ // Example: something::toString } else if (captures.length == 1) { Class clazz = factoryMethodType.parameterType(0); delegateClassType = Type.getType(clazz); interfaceMethodType = interfaceMethodType.insertParameterTypes(0, clazz); } else { - throw new LambdaConversionException( - "unexpected number of captures [ " + captures.length + "]"); + throw new LambdaConversionException("unexpected number of captures [ " + captures.length + "]"); } } else { - throw new IllegalStateException( - "unexpected invocation type [" + delegateInvokeType + "]"); + throw new IllegalStateException("unexpected invocation type [" + delegateInvokeType + "]"); } - Handle delegateHandle = - new Handle(delegateInvokeType, delegateClassType.getInternalName(), - delegateMethodName, delegateMethodType.toMethodDescriptorString(), - isDelegateInterface); + Handle delegateHandle = new Handle( + delegateInvokeType, + delegateClassType.getInternalName(), + delegateMethodName, + delegateMethodType.toMethodDescriptorString(), + isDelegateInterface + ); // Fill in args for indy. Always add the delegate handle and // whether it's static or not then injections as necessary. Object[] args = new Object[2 + injections.length]; @@ -468,10 +480,11 @@ public final class LambdaBootstrap { args[1] = delegateInvokeType == H_INVOKESTATIC && isDelegateAugmented == false ? 0 : 1; System.arraycopy(injections, 0, args, 2, injections.length); iface.invokeDynamic( - delegateMethodName, - Type.getMethodType(interfaceMethodType.toMethodDescriptorString()).getDescriptor(), - DELEGATE_BOOTSTRAP_HANDLE, - args); + delegateMethodName, + Type.getMethodType(interfaceMethodType.toMethodDescriptorString()).getDescriptor(), + DELEGATE_BOOTSTRAP_HANDLE, + args + ); iface.returnValue(); iface.endMethod(); @@ -488,29 +501,24 @@ public final class LambdaBootstrap { * Defines the {@link Class} for the lambda class using the same {@link Compiler.Loader} * that originally defined the class for the Painless script. */ - private static Class createLambdaClass( - Compiler.Loader loader, - ClassWriter cw, - Type lambdaClassType) { + private static Class createLambdaClass(Compiler.Loader loader, ClassWriter cw, Type lambdaClassType) { byte[] classBytes = cw.toByteArray(); // DEBUG: // new ClassReader(classBytes).accept(new TraceClassVisitor(new PrintWriter(System.out)), ClassReader.SKIP_DEBUG); - return AccessController.doPrivileged((PrivilegedAction>)() -> - loader.defineLambda(lambdaClassType.getClassName(), classBytes)); + return AccessController.doPrivileged( + (PrivilegedAction>) () -> loader.defineLambda(lambdaClassType.getClassName(), classBytes) + ); } /** * Creates an {@link ConstantCallSite} that will return the same instance * of the generated lambda class every time this linked factory method is called. 
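
The no-capture linkage described just above binds one pre-built instance into a ConstantCallSite so linked factory calls never re-instantiate it. A small stand-alone sketch of that idea, with a hypothetical Supplier standing in for the generated lambda class:

import java.lang.invoke.ConstantCallSite;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.function.Supplier;

public class ConstantCallSiteSketch {
    public static void main(String[] args) throws Throwable {
        Supplier<String> singleton = () -> "hello";
        // a handle of type ()Supplier that always returns the same pre-built instance
        MethodHandle factory = MethodHandles.constant(Supplier.class, singleton);
        ConstantCallSite site = new ConstantCallSite(factory);
        Supplier<?> first = (Supplier<?>) site.getTarget().invoke();
        Supplier<?> second = (Supplier<?>) site.getTarget().invoke();
        System.out.println(first == second); // true: every call yields the same instance
        System.out.println(first.get());     // hello
    }
}
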
*/ - private static CallSite createNoCaptureCallSite( - MethodType factoryMethodType, - Class lambdaClass) { + private static CallSite createNoCaptureCallSite(MethodType factoryMethodType, Class lambdaClass) { try { - return new ConstantCallSite(MethodHandles.constant( - factoryMethodType.returnType(), lambdaClass.getConstructor().newInstance())); + return new ConstantCallSite(MethodHandles.constant(factoryMethodType.returnType(), lambdaClass.getConstructor().newInstance())); } catch (ReflectiveOperationException exception) { throw new IllegalStateException("unable to instantiate lambda class", exception); } @@ -519,14 +527,10 @@ public final class LambdaBootstrap { /** * Creates an {@link ConstantCallSite} */ - private static CallSite createCaptureCallSite( - Lookup lookup, - MethodType factoryMethodType, - Class lambdaClass) { + private static CallSite createCaptureCallSite(Lookup lookup, MethodType factoryMethodType, Class lambdaClass) { try { - return new ConstantCallSite( - lookup.findStatic(lambdaClass, LAMBDA_FACTORY_METHOD_NAME, factoryMethodType)); + return new ConstantCallSite(lookup.findStatic(lambdaClass, LAMBDA_FACTORY_METHOD_NAME, factoryMethodType)); } catch (ReflectiveOperationException exception) { throw new IllegalStateException("unable to create lambda class", exception); } @@ -540,12 +544,14 @@ public final class LambdaBootstrap { * of either a lot more code or requiring many {@link Class}es to be looked * up at link-time. */ - public static CallSite delegateBootstrap(Lookup lookup, - String delegateMethodName, - MethodType interfaceMethodType, - MethodHandle delegateMethodHandle, - int isVirtual, - Object... injections) { + public static CallSite delegateBootstrap( + Lookup lookup, + String delegateMethodName, + MethodType interfaceMethodType, + MethodHandle delegateMethodHandle, + int isVirtual, + Object... injections + ) { if (injections.length > 0) { delegateMethodHandle = MethodHandles.insertArguments(delegateMethodHandle, isVirtual, injections); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/opensearch/painless/MethodWriter.java index 63903078195..028acdb7d87 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/MethodWriter.java @@ -118,12 +118,16 @@ public final class MethodWriter extends GeneratorAdapter { private final BitSet statements; private final CompilerSettings settings; - private final Deque> stringConcatArgs = - (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE == null) ? null : new ArrayDeque<>(); + private final Deque> stringConcatArgs = (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE == null) ? null : new ArrayDeque<>(); public MethodWriter(int access, Method method, ClassVisitor cw, BitSet statements, CompilerSettings settings) { - super(Opcodes.ASM5, cw.visitMethod(access, method.getName(), method.getDescriptor(), null, null), - access, method.getName(), method.getDescriptor()); + super( + Opcodes.ASM5, + cw.visitMethod(access, method.getName(), method.getDescriptor(), null, null), + access, + method.getName(), + method.getDescriptor() + ); this.statements = statements; this.settings = settings; @@ -140,7 +144,7 @@ public final class MethodWriter extends GeneratorAdapter { // (e.g. 
nodes get assigned wrong offsets by antlr walker) // TODO: introduce a way to ignore internal statements so this assert is not triggered // TODO: https://github.com/elastic/elasticsearch/issues/51836 - //assert statements.get(offset) == false; + // assert statements.get(offset) == false; statements.set(offset); } @@ -177,7 +181,7 @@ public final class MethodWriter extends GeneratorAdapter { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); } else if (cast.originalType == String.class && cast.targetType == char.class) { invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); - // TODO: remove this when the transition from Joda to Java datetimes is completed + // TODO: remove this when the transition from Joda to Java datetimes is completed } else if (cast.originalType == JodaCompatibleZonedDateTime.class && cast.targetType == ZonedDateTime.class) { invokeStatic(UTILITY_TYPE, JCZDT_TO_ZONEDDATETIME); } else if (cast.unboxOriginalType != null && cast.boxTargetType != null) { @@ -198,46 +202,46 @@ public final class MethodWriter extends GeneratorAdapter { box(getType(cast.boxTargetType)); } else if (cast.originalType == def.class) { if (cast.explicitCast) { - if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN); - else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_EXPLICIT); - else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_EXPLICIT); - else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_EXPLICIT); - else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_EXPLICIT); - else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_EXPLICIT); - else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_EXPLICIT); - else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_EXPLICIT); - else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN); - else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_EXPLICIT); - else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_EXPLICIT); + if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN); + else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_EXPLICIT); + else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_EXPLICIT); + else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_EXPLICIT); + else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_EXPLICIT); + else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_EXPLICIT); + else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_EXPLICIT); + else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_EXPLICIT); + else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_EXPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_EXPLICIT); else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_CHARACTER_EXPLICIT); - else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_EXPLICIT); - else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_EXPLICIT); - else if 
(cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_EXPLICIT); - else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_EXPLICIT); - else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_EXPLICIT); + else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_EXPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_EXPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_EXPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_EXPLICIT); + else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_EXPLICIT); // TODO: remove this when the transition from Joda to Java datetimes is completed else if (cast.targetType == ZonedDateTime.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_ZONEDDATETIME); else { writeCast(cast.originalType, cast.targetType); } } else { - if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN); - else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_IMPLICIT); - else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_IMPLICIT); - else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_IMPLICIT); - else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_IMPLICIT); - else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_IMPLICIT); - else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_IMPLICIT); - else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_IMPLICIT); - else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN); - else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT); - else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT); + if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN); + else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_IMPLICIT); + else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_IMPLICIT); + else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_IMPLICIT); + else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_IMPLICIT); + else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_IMPLICIT); + else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_IMPLICIT); + else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_IMPLICIT); + else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT); else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_CHARACTER_IMPLICIT); - else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_IMPLICIT); - else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_IMPLICIT); - else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_IMPLICIT); - else if (cast.targetType == Double.class) 
invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_IMPLICIT); - else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_IMPLICIT); + else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_IMPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_IMPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_IMPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_IMPLICIT); + else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_IMPLICIT); // TODO: remove this when the transition from Joda to Java datetimes is completed else if (cast.targetType == ZonedDateTime.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_ZONEDDATETIME); else { @@ -325,24 +329,24 @@ public final class MethodWriter extends GeneratorAdapter { } } else { // Java 8: push a StringBuilder append - if (clazz == boolean.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); - else if (clazz == char.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); - else if (clazz == byte.class || - clazz == short.class || - clazz == int.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); - else if (clazz == long.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); - else if (clazz == float.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); - else if (clazz == double.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); - else if (clazz == String.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); - else invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT); + if (clazz == boolean.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); + else if (clazz == char.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); + else if (clazz == byte.class || clazz == short.class || clazz == int.class) invokeVirtual( + STRINGBUILDER_TYPE, + STRINGBUILDER_APPEND_INT + ); + else if (clazz == long.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); + else if (clazz == float.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); + else if (clazz == double.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); + else if (clazz == String.class) invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); + else invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT); } } public void writeToStrings() { if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) { // Java 9+: use type information and push invokeDynamic - final String desc = Type.getMethodDescriptor(STRING_TYPE, - stringConcatArgs.pop().stream().toArray(Type[]::new)); + final String desc = Type.getMethodDescriptor(STRING_TYPE, stringConcatArgs.pop().stream().toArray(Type[]::new)); invokeDynamic("concat", desc, INDY_STRING_CONCAT_BOOTSTRAP_HANDLE); } else { // Java 8: call toString() on StringBuilder @@ -351,8 +355,14 @@ public final class MethodWriter extends GeneratorAdapter { } /** Writes a dynamic binary instruction: returnType, lhs, and rhs can be different */ - public void writeDynamicBinaryInstruction(Location location, Class returnType, Class lhs, Class rhs, - Operation operation, int flags) { + public void writeDynamicBinaryInstruction( + Location location, + Class returnType, + Class lhs, + Class rhs, + Operation operation, + int flags + ) { Type methodType = 
Type.getMethodType(getType(returnType), getType(lhs), getType(rhs)); switch (operation) { @@ -403,10 +413,13 @@ public final class MethodWriter extends GeneratorAdapter { /** Writes a static binary instruction */ public void writeBinaryInstruction(Location location, Class clazz, Operation operation) { - if ( (clazz == float.class || clazz == double.class) && - (operation == Operation.LSH || operation == Operation.USH || - operation == Operation.RSH || operation == Operation.BWAND || - operation == Operation.XOR || operation == Operation.BWOR)) { + if ((clazz == float.class || clazz == double.class) + && (operation == Operation.LSH + || operation == Operation.USH + || operation == Operation.RSH + || operation == Operation.BWAND + || operation == Operation.XOR + || operation == Operation.BWOR)) { throw location.createError(new IllegalStateException("Illegal tree structure.")); } @@ -515,8 +528,13 @@ public final class MethodWriter extends GeneratorAdapter { // true to reference the appropriate class constant when calling a static interface // method since java 8 did not check, but java 9 and 10 do if (painlessMethod.javaMethod.getDeclaringClass().isInterface()) { - visitMethodInsn(Opcodes.INVOKESTATIC, type.getInternalName(), - painlessMethod.javaMethod.getName(), method.getDescriptor(), true); + visitMethodInsn( + Opcodes.INVOKESTATIC, + type.getInternalName(), + painlessMethod.javaMethod.getName(), + method.getDescriptor(), + true + ); } else { invokeStatic(type, method); } @@ -539,10 +557,10 @@ public final class MethodWriter extends GeneratorAdapter { System.arraycopy(functionRef.delegateInjections, 0, args, 7, functionRef.delegateInjections.length); invokeDynamic( - functionRef.interfaceMethodName, - functionRef.factoryMethodType.toMethodDescriptorString(), - LAMBDA_BOOTSTRAP_HANDLE, - args + functionRef.interfaceMethodName, + functionRef.factoryMethodType.toMethodDescriptorString(), + LAMBDA_BOOTSTRAP_HANDLE, + args ); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Operation.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Operation.java index 9856ae35152..e27a0b6d8bd 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Operation.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Operation.java @@ -40,31 +40,31 @@ package org.opensearch.painless; */ public enum Operation { - MUL ( "*" , "multiplication" ), - DIV ( "/" , "division" ), - REM ( "%" , "remainder" ), - ADD ( "+" , "addition" ), - SUB ( "-" , "subtraction" ), - FIND ( "=~" , "find" ), - MATCH ( "==~" , "match" ), - LSH ( "<<" , "left shift" ), - RSH ( ">>" , "right shift" ), - USH ( ">>>" , "unsigned shift" ), - BWNOT ( "~" , "bitwise not" ), - BWAND ( "&" , "bitwise and" ), - XOR ( "^" , "bitwise xor" ), - BWOR ( "|" , "boolean or" ), - NOT ( "!" 
, "boolean not" ), - AND ( "&&" , "boolean and" ), - OR ( "||" , "boolean or" ), - LT ( "<" , "less than" ), - LTE ( "<=" , "less than or equals" ), - GT ( ">" , "greater than" ), - GTE ( ">=" , "greater than or equals" ), - EQ ( "==" , "equals" ), - EQR ( "===" , "reference equals" ), - NE ( "!=" , "not equals" ), - NER ( "!==" , "reference not equals" ); + MUL("*", "multiplication"), + DIV("/", "division"), + REM("%", "remainder"), + ADD("+", "addition"), + SUB("-", "subtraction"), + FIND("=~", "find"), + MATCH("==~", "match"), + LSH("<<", "left shift"), + RSH(">>", "right shift"), + USH(">>>", "unsigned shift"), + BWNOT("~", "bitwise not"), + BWAND("&", "bitwise and"), + XOR("^", "bitwise xor"), + BWOR("|", "boolean or"), + NOT("!", "boolean not"), + AND("&&", "boolean and"), + OR("||", "boolean or"), + LT("<", "less than"), + LTE("<=", "less than or equals"), + GT(">", "greater than"), + GTE(">=", "greater than or equals"), + EQ("==", "equals"), + EQR("===", "reference equals"), + NE("!=", "not equals"), + NER("!==", "reference not equals"); public final String symbol; public final String name; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessError.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessError.java index b9d0b7d5d0c..93ebd97eff6 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessError.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessError.java @@ -47,6 +47,6 @@ public class PainlessError extends Error { * @param message The error message. */ public PainlessError(final String message) { - super(message); + super(message); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java index f4acea8ab05..4c693243d2a 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessPlugin.java @@ -32,7 +32,6 @@ package org.opensearch.painless; - import org.apache.lucene.util.SetOnce; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionResponse; @@ -130,12 +129,19 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens } @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver expressionResolver, + Supplier repositoriesServiceSupplier + ) { // this is a hack to bind the painless script engine in guice (all components are added to guice), so that // the painless context api. 
this is a temporary measure until transport actions do no require guice return Collections.singletonList(painlessScriptEngine.get()); @@ -148,11 +154,11 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens @Override public void loadExtensions(ExtensionLoader loader) { - loader.loadExtensions(PainlessExtension.class).stream() + loader.loadExtensions(PainlessExtension.class) + .stream() .flatMap(extension -> extension.getContextWhitelists().entrySet().stream()) .forEach(entry -> { - List existing = whitelists.computeIfAbsent(entry.getKey(), - c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); + List existing = whitelists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>(Whitelist.BASE_WHITELISTS)); existing.addAll(entry.getValue()); }); } @@ -171,10 +177,15 @@ public final class PainlessPlugin extends Plugin implements ScriptPlugin, Extens } @Override - public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { List handlers = new ArrayList<>(); handlers.add(new PainlessExecuteAction.RestAction()); handlers.add(new PainlessContextAction.RestAction()); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScript.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScript.java index c9e2c94aef2..766d9baaa0b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScript.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScript.java @@ -101,7 +101,7 @@ public interface PainlessScript { pos = new ScriptException.Position(originalOffset, startOffset, endOffset); } break; - // but filter our own internal stacks (e.g. indy bootstrap) + // but filter our own internal stacks (e.g. 
indy bootstrap) } else if (!shouldFilter(element)) { scriptStack.add(element.toString()); } @@ -115,9 +115,9 @@ public interface PainlessScript { /** returns true for methods that are part of the runtime */ default boolean shouldFilter(StackTraceElement element) { - return element.getClassName().startsWith("org.opensearch.painless.") || - element.getClassName().startsWith("java.lang.invoke.") || - element.getClassName().startsWith("sun.invoke."); + return element.getClassName().startsWith("org.opensearch.painless.") + || element.getClassName().startsWith("java.lang.invoke.") + || element.getClassName().startsWith("sun.invoke."); } /** diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java index f9cd93500ae..1663185ae7c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java @@ -85,9 +85,7 @@ public final class PainlessScriptEngine implements ScriptEngine { static { final Permissions none = new Permissions(); none.setReadOnly(); - COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { - new ProtectionDomain(null, none) - }); + COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { new ProtectionDomain(null, none) }); } /** @@ -113,8 +111,10 @@ public final class PainlessScriptEngine implements ScriptEngine { for (Map.Entry, List> entry : contexts.entrySet()) { ScriptContext context = entry.getKey(); PainlessLookup lookup = PainlessLookupBuilder.buildFromWhitelists(entry.getValue()); - contextsToCompilers.put(context, - new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, lookup)); + contextsToCompilers.put( + context, + new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, lookup) + ); contextsToLookups.put(context, lookup); } @@ -136,12 +136,7 @@ public final class PainlessScriptEngine implements ScriptEngine { } @Override - public T compile( - String scriptName, - String scriptSource, - ScriptContext context, - Map params - ) { + public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { Compiler compiler = contextsToCompilers.get(context); // Check we ourselves are not being called by unprivileged code. @@ -180,11 +175,7 @@ public final class PainlessScriptEngine implements ScriptEngine { * @param The factory class. * @return A factory class that will return script instances. 
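
A few hunks up, PainlessScript.shouldFilter was only reflowed onto leading operators. As a runnable illustration of that filtering convention (dropping the runtime's own frames so a script stack trace shows only script frames), using the same package prefixes as the patch:

import java.util.ArrayList;
import java.util.List;

public class StackFilterSketch {
    // same package-prefix test as PainlessScript.shouldFilter
    static boolean shouldFilter(StackTraceElement element) {
        String cls = element.getClassName();
        return cls.startsWith("org.opensearch.painless.")
            || cls.startsWith("java.lang.invoke.")
            || cls.startsWith("sun.invoke.");
    }

    public static void main(String[] args) {
        List<String> scriptStack = new ArrayList<>();
        for (StackTraceElement element : new Throwable().getStackTrace()) {
            if (shouldFilter(element) == false) {
                scriptStack.add(element.toString());
            }
        }
        scriptStack.forEach(System.out::println);
    }
}
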
*/ - private Type generateStatefulFactory( - Loader loader, - ScriptContext context, - ScriptScope scriptScope - ) { + private Type generateStatefulFactory(Loader loader, ScriptContext context, ScriptScope scriptScope) { int classFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; int classAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL; String interfaceBase = Type.getType(context.statefulFactoryClazz).getInternalName(); @@ -205,17 +196,29 @@ public final class PainlessScriptEngine implements ScriptEngine { } for (int count = 0; count < newFactory.getParameterTypes().length; ++count) { - writer.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL, "$arg" + count, - Type.getType(newFactory.getParameterTypes()[count]).getDescriptor(), null, null).visitEnd(); + writer.visitField( + Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL, + "$arg" + count, + Type.getType(newFactory.getParameterTypes()[count]).getDescriptor(), + null, + null + ).visitEnd(); } - org.objectweb.asm.commons.Method base = - new org.objectweb.asm.commons.Method("", MethodType.methodType(void.class).toMethodDescriptorString()); - org.objectweb.asm.commons.Method init = new org.objectweb.asm.commons.Method("", - MethodType.methodType(void.class, newFactory.getParameterTypes()).toMethodDescriptorString()); + org.objectweb.asm.commons.Method base = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class).toMethodDescriptorString() + ); + org.objectweb.asm.commons.Method init = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class, newFactory.getParameterTypes()).toMethodDescriptorString() + ); - GeneratorAdapter constructor = new GeneratorAdapter(Opcodes.ASM5, init, - writer.visitMethod(Opcodes.ACC_PUBLIC, init.getName(), init.getDescriptor(), null, null)); + GeneratorAdapter constructor = new GeneratorAdapter( + Opcodes.ASM5, + init, + writer.visitMethod(Opcodes.ACC_PUBLIC, init.getName(), init.getDescriptor(), null, null) + ); constructor.visitCode(); constructor.loadThis(); constructor.invokeConstructor(OBJECT_TYPE, base); @@ -239,18 +242,24 @@ public final class PainlessScriptEngine implements ScriptEngine { } } - org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method(newInstance.getName(), - MethodType.methodType(newInstance.getReturnType(), newInstance.getParameterTypes()).toMethodDescriptorString()); + org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method( + newInstance.getName(), + MethodType.methodType(newInstance.getReturnType(), newInstance.getParameterTypes()).toMethodDescriptorString() + ); List> parameters = new ArrayList<>(Arrays.asList(newFactory.getParameterTypes())); parameters.addAll(Arrays.asList(newInstance.getParameterTypes())); - org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method("", - MethodType.methodType(void.class, parameters.toArray(new Class[] {})).toMethodDescriptorString()); + org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class, parameters.toArray(new Class[] {})).toMethodDescriptorString() + ); - GeneratorAdapter adapter = new GeneratorAdapter(Opcodes.ASM5, instance, - writer.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, - instance.getName(), instance.getDescriptor(), null, null)); + GeneratorAdapter adapter = new GeneratorAdapter( + Opcodes.ASM5, + instance, + writer.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, instance.getName(), instance.getDescriptor(), null, 
null) + ); adapter.visitCode(); adapter.newInstance(WriterConstants.CLASS_TYPE); adapter.dup(); @@ -286,14 +295,9 @@ public final class PainlessScriptEngine implements ScriptEngine { * @param The factory class. * @return A factory class that will return script instances. */ - private T generateFactory( - Loader loader, - ScriptContext context, - Type classType, - ScriptScope scriptScope - ) { + private T generateFactory(Loader loader, ScriptContext context, Type classType, ScriptScope scriptScope) { int classFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; - int classAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER| Opcodes.ACC_FINAL; + int classAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL; String interfaceBase = Type.getType(context.factoryClazz).getInternalName(); String className = interfaceBase + "$Factory"; String[] classInterfaces = new String[] { interfaceBase }; @@ -301,11 +305,16 @@ public final class PainlessScriptEngine implements ScriptEngine { ClassWriter writer = new ClassWriter(classFrames); writer.visit(WriterConstants.CLASS_VERSION, classAccess, className, null, OBJECT_TYPE.getInternalName(), classInterfaces); - org.objectweb.asm.commons.Method init = - new org.objectweb.asm.commons.Method("", MethodType.methodType(void.class).toMethodDescriptorString()); + org.objectweb.asm.commons.Method init = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class).toMethodDescriptorString() + ); - GeneratorAdapter constructor = new GeneratorAdapter(Opcodes.ASM5, init, - writer.visitMethod(Opcodes.ACC_PUBLIC, init.getName(), init.getDescriptor(), null, null)); + GeneratorAdapter constructor = new GeneratorAdapter( + Opcodes.ASM5, + init, + writer.visitMethod(Opcodes.ACC_PUBLIC, init.getName(), init.getDescriptor(), null, null) + ); constructor.visitCode(); constructor.loadThis(); constructor.invokeConstructor(OBJECT_TYPE, init); @@ -325,14 +334,20 @@ public final class PainlessScriptEngine implements ScriptEngine { } } - org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method(reflect.getName(), - MethodType.methodType(reflect.getReturnType(), reflect.getParameterTypes()).toMethodDescriptorString()); - org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method("", - MethodType.methodType(void.class, reflect.getParameterTypes()).toMethodDescriptorString()); + org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method( + reflect.getName(), + MethodType.methodType(reflect.getReturnType(), reflect.getParameterTypes()).toMethodDescriptorString() + ); + org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class, reflect.getParameterTypes()).toMethodDescriptorString() + ); - GeneratorAdapter adapter = new GeneratorAdapter(Opcodes.ASM5, instance, - writer.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, - instance.getName(), instance.getDescriptor(), null, null)); + GeneratorAdapter adapter = new GeneratorAdapter( + Opcodes.ASM5, + instance, + writer.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, instance.getName(), instance.getDescriptor(), null, null) + ); adapter.visitCode(); adapter.newInstance(classType); adapter.dup(); @@ -344,11 +359,16 @@ public final class PainlessScriptEngine implements ScriptEngine { writeNeedsMethods(context.factoryClazz, writer, scriptScope.getUsedVariables()); String methodName = "isResultDeterministic"; - org.objectweb.asm.commons.Method isResultDeterministic = new 
org.objectweb.asm.commons.Method(methodName, - MethodType.methodType(boolean.class).toMethodDescriptorString()); + org.objectweb.asm.commons.Method isResultDeterministic = new org.objectweb.asm.commons.Method( + methodName, + MethodType.methodType(boolean.class).toMethodDescriptorString() + ); - GeneratorAdapter deterAdapter = new GeneratorAdapter(Opcodes.ASM5, isResultDeterministic, - writer.visitMethod(Opcodes.ACC_PUBLIC, methodName, isResultDeterministic.getDescriptor(), null, null)); + GeneratorAdapter deterAdapter = new GeneratorAdapter( + Opcodes.ASM5, + isResultDeterministic, + writer.visitMethod(Opcodes.ACC_PUBLIC, methodName, isResultDeterministic.getDescriptor(), null, null) + ); deterAdapter.visitCode(); deterAdapter.push(scriptScope.isDeterministic()); deterAdapter.returnValue(); @@ -361,10 +381,15 @@ public final class PainlessScriptEngine implements ScriptEngine { if (docFieldsReflect.getParameterCount() != 0) { throw new IllegalArgumentException("doc_fields may not take parameters"); } - org.objectweb.asm.commons.Method docFields = new org.objectweb.asm.commons.Method(docFieldsReflect.getName(), - MethodType.methodType(List.class).toMethodDescriptorString()); - GeneratorAdapter docAdapter = new GeneratorAdapter(Opcodes.ASM5, docFields, - writer.visitMethod(Opcodes.ACC_PUBLIC, docFieldsReflect.getName(), docFields.getDescriptor(), null, null)); + org.objectweb.asm.commons.Method docFields = new org.objectweb.asm.commons.Method( + docFieldsReflect.getName(), + MethodType.methodType(List.class).toMethodDescriptorString() + ); + GeneratorAdapter docAdapter = new GeneratorAdapter( + Opcodes.ASM5, + docFields, + writer.visitMethod(Opcodes.ACC_PUBLIC, docFieldsReflect.getName(), docFields.getDescriptor(), null, null) + ); docAdapter.visitCode(); docAdapter.newInstance(WriterConstants.ARRAY_LIST_TYPE); docAdapter.dup(); @@ -388,23 +413,31 @@ public final class PainlessScriptEngine implements ScriptEngine { } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. 
throw new IllegalStateException( - "An internal error occurred attempting to define the factory class [" + className + "].", exception); + "An internal error occurred attempting to define the factory class [" + className + "].", + exception + ); } } private void writeNeedsMethods(Class clazz, ClassWriter writer, Set extractedVariables) { for (Method method : clazz.getMethods()) { - if (method.getName().startsWith("needs") && - method.getReturnType().equals(boolean.class) && method.getParameterTypes().length == 0) { + if (method.getName().startsWith("needs") + && method.getReturnType().equals(boolean.class) + && method.getParameterTypes().length == 0) { String name = method.getName(); name = name.substring(5); name = Character.toLowerCase(name.charAt(0)) + name.substring(1); - org.objectweb.asm.commons.Method needs = new org.objectweb.asm.commons.Method(method.getName(), - MethodType.methodType(boolean.class).toMethodDescriptorString()); + org.objectweb.asm.commons.Method needs = new org.objectweb.asm.commons.Method( + method.getName(), + MethodType.methodType(boolean.class).toMethodDescriptorString() + ); - GeneratorAdapter adapter = new GeneratorAdapter(Opcodes.ASM5, needs, - writer.visitMethod(Opcodes.ACC_PUBLIC, needs.getName(), needs.getDescriptor(), null, null)); + GeneratorAdapter adapter = new GeneratorAdapter( + Opcodes.ASM5, + needs, + writer.visitMethod(Opcodes.ACC_PUBLIC, needs.getName(), needs.getDescriptor(), null, null) + ); adapter.visitCode(); adapter.push(extractedVariables.contains(name)); adapter.returnValue(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java index 8b05fcbaf4f..a9d5a7abbb1 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java @@ -83,25 +83,38 @@ public class ScriptClassInfo { returnType = m.getReturnType(); } else { throw new IllegalArgumentException( - "Painless can only implement interfaces that have a single method named [execute] but [" + baseClass.getName() - + "] has more than one."); + "Painless can only implement interfaces that have a single method named [execute] but [" + + baseClass.getName() + + "] has more than one." + ); } - } else if (m.getName().startsWith("needs") && - m.getReturnType() == boolean.class && - m.getParameterTypes().length == 0) { + } else if (m.getName().startsWith("needs") && m.getReturnType() == boolean.class && m.getParameterTypes().length == 0) { needsMethods.add(new org.objectweb.asm.commons.Method(m.getName(), NEEDS_PARAMETER_METHOD_TYPE.toMethodDescriptorString())); - } else if (m.getName().startsWith("get") && - m.getName().equals("getClass") == false && - Modifier.isStatic(m.getModifiers()) == false) { - getReturns.add( - definitionTypeForClass(painlessLookup, m.getReturnType(), componentType -> "[" + m.getName() + "] has unknown return " + - "type [" + componentType.getName() + "]. Painless can only support getters with return types that are " + - "whitelisted.")); + } else if (m.getName().startsWith("get") + && m.getName().equals("getClass") == false + && Modifier.isStatic(m.getModifiers()) == false) { + getReturns.add( + definitionTypeForClass( + painlessLookup, + m.getReturnType(), + componentType -> "[" + + m.getName() + + "] has unknown return " + + "type [" + + componentType.getName() + + "]. 
Painless can only support getters with return types that are " + + "whitelisted." + ) + ); - getMethods.add(new org.objectweb.asm.commons.Method(m.getName(), - MethodType.methodType(m.getReturnType()).toMethodDescriptorString())); + getMethods.add( + new org.objectweb.asm.commons.Method( + m.getName(), + MethodType.methodType(m.getReturnType()).toMethodDescriptorString() + ) + ); - } + } } if (executeMethod == null) { @@ -110,18 +123,24 @@ public class ScriptClassInfo { ArrayList converters = new ArrayList<>(); FunctionTable.LocalFunction defConverter = null; for (java.lang.reflect.Method m : baseClass.getMethods()) { - if (m.getName().startsWith("convertFrom") && - m.getParameterTypes().length == 1 && - m.getReturnType() == returnType && - Modifier.isStatic(m.getModifiers())) { + if (m.getName().startsWith("convertFrom") + && m.getParameterTypes().length == 1 + && m.getReturnType() == returnType + && Modifier.isStatic(m.getModifiers())) { if (m.getName().equals("convertFromDef")) { if (m.getParameterTypes()[0] != Object.class) { - throw new IllegalStateException("convertFromDef must take a single Object as an argument, " + - "not [" + m.getParameterTypes()[0] + "]"); + throw new IllegalStateException( + "convertFromDef must take a single Object as an argument, " + "not [" + m.getParameterTypes()[0] + "]" + ); } - defConverter = new FunctionTable.LocalFunction(m.getName(), m.getReturnType(), Arrays.asList(m.getParameterTypes()), - true, true); + defConverter = new FunctionTable.LocalFunction( + m.getName(), + m.getReturnType(), + Arrays.asList(m.getParameterTypes()), + true, + true + ); } else { converters.add( new FunctionTable.LocalFunction(m.getName(), m.getReturnType(), Arrays.asList(m.getParameterTypes()), true, true) @@ -134,17 +153,24 @@ public class ScriptClassInfo { MethodType methodType = MethodType.methodType(executeMethod.getReturnType(), executeMethod.getParameterTypes()); this.executeMethod = new org.objectweb.asm.commons.Method(executeMethod.getName(), methodType.toMethodDescriptorString()); - executeMethodReturnType = definitionTypeForClass(painlessLookup, executeMethod.getReturnType(), - componentType -> "Painless can only implement execute methods returning a whitelisted type but [" + baseClass.getName() - + "#execute] returns [" + componentType.getName() + "] which isn't whitelisted."); + executeMethodReturnType = definitionTypeForClass( + painlessLookup, + executeMethod.getReturnType(), + componentType -> "Painless can only implement execute methods returning a whitelisted type but [" + + baseClass.getName() + + "#execute] returns [" + + componentType.getName() + + "] which isn't whitelisted." + ); // Look up the argument List arguments = new ArrayList<>(); String[] argumentNamesConstant = readArgumentNamesConstant(baseClass); Class[] types = executeMethod.getParameterTypes(); if (argumentNamesConstant.length != types.length) { - throw new IllegalArgumentException("[" + baseClass.getName() + "#ARGUMENTS] has length [2] but [" - + baseClass.getName() + "#execute] takes [1] argument."); + throw new IllegalArgumentException( + "[" + baseClass.getName() + "#ARGUMENTS] has length [2] but [" + baseClass.getName() + "#execute] takes [1] argument." 
+ ); } for (int arg = 0; arg < types.length; arg++) { arguments.add(methodArgument(painlessLookup, types[arg], argumentNamesConstant[arg])); @@ -228,13 +254,23 @@ public class ScriptClassInfo { } private MethodArgument methodArgument(PainlessLookup painlessLookup, Class clazz, String argName) { - Class defClass = definitionTypeForClass(painlessLookup, clazz, componentType -> "[" + argName + "] is of unknown type [" - + componentType.getName() + ". Painless interfaces can only accept arguments that are of whitelisted types."); + Class defClass = definitionTypeForClass( + painlessLookup, + clazz, + componentType -> "[" + + argName + + "] is of unknown type [" + + componentType.getName() + + ". Painless interfaces can only accept arguments that are of whitelisted types." + ); return new MethodArgument(defClass, argName); } - private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, - Function, String> unknownErrorMessageSource) { + private static Class definitionTypeForClass( + PainlessLookup painlessLookup, + Class type, + Function, String> unknownErrorMessageSource + ) { type = PainlessLookupUtility.javaTypeToType(type); Class componentType = type; @@ -254,12 +290,21 @@ public class ScriptClassInfo { try { argumentNamesField = iface.getField("PARAMETERS"); } catch (NoSuchFieldException e) { - throw new IllegalArgumentException("Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " - + "names of the method arguments but [" + iface.getName() + "] doesn't have one.", e); + throw new IllegalArgumentException( + "Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " + + "names of the method arguments but [" + + iface.getName() + + "] doesn't have one.", + e + ); } if (false == argumentNamesField.getType().equals(String[].class)) { - throw new IllegalArgumentException("Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " - + "names of the method arguments but [" + iface.getName() + "] doesn't have one."); + throw new IllegalArgumentException( + "Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " + + "names of the method arguments but [" + + iface.getName() + + "] doesn't have one." 
+ ); } try { return (String[]) argumentNamesField.get(null); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Utility.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Utility.java index b0c7468bafe..c9a9419fd48 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Utility.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Utility.java @@ -48,13 +48,15 @@ public class Utility { public static char StringTochar(final String value) { if (value == null) { - throw new ClassCastException("cannot cast " + - "null " + String.class.getCanonicalName() + " to " + char.class.getCanonicalName()); + throw new ClassCastException( + "cannot cast " + "null " + String.class.getCanonicalName() + " to " + char.class.getCanonicalName() + ); } if (value.length() != 1) { - throw new ClassCastException("cannot cast " + - String.class.getCanonicalName() + " with length not equal to one to " + char.class.getCanonicalName()); + throw new ClassCastException( + "cannot cast " + String.class.getCanonicalName() + " with length not equal to one to " + char.class.getCanonicalName() + ); } return value.charAt(0); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/opensearch/painless/WriterConstants.java index 35fa2cbda3d..530b4d26079 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/WriterConstants.java @@ -66,7 +66,7 @@ public final class WriterConstants { public static final String CTOR_METHOD_NAME = ""; - public static final Method CLINIT = getAsmMethod(void.class, ""); + public static final Method CLINIT = getAsmMethod(void.class, ""); public static final Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); @@ -83,8 +83,11 @@ public final class WriterConstants { public static final Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); // TODO: remove this when the transition from Joda to Java datetimes is completed - public static final Method JCZDT_TO_ZONEDDATETIME = - getAsmMethod(ZonedDateTime.class, "JCZDTToZonedDateTime", JodaCompatibleZonedDateTime.class); + public static final Method JCZDT_TO_ZONEDDATETIME = getAsmMethod( + ZonedDateTime.class, + "JCZDTToZonedDateTime", + JodaCompatibleZonedDateTime.class + ); /** * A Method instance for {@linkplain Pattern}. 
This isn't available from PainlessLookup because we intentionally don't add it @@ -97,46 +100,59 @@ public final class WriterConstants { public static final Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); public static final Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); - public static final Method DEF_BOOTSTRAP_METHOD = getAsmMethod(CallSite.class, "$bootstrapDef", MethodHandles.Lookup.class, - String.class, MethodType.class, int.class, int.class, Object[].class); - static final Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), "$bootstrapDef", - DEF_BOOTSTRAP_METHOD.getDescriptor(), false); + public static final Method DEF_BOOTSTRAP_METHOD = getAsmMethod( + CallSite.class, + "$bootstrapDef", + MethodHandles.Lookup.class, + String.class, + MethodType.class, + int.class, + int.class, + Object[].class + ); + static final Handle DEF_BOOTSTRAP_HANDLE = new Handle( + Opcodes.H_INVOKESTATIC, + CLASS_TYPE.getInternalName(), + "$bootstrapDef", + DEF_BOOTSTRAP_METHOD.getDescriptor(), + false + ); public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); public static final Method DEF_TO_P_BOOLEAN = getAsmMethod(boolean.class, "defToboolean", Object.class); - public static final Method DEF_TO_P_BYTE_IMPLICIT = getAsmMethod(byte.class , "defTobyteImplicit" , Object.class); - public static final Method DEF_TO_P_SHORT_IMPLICIT = getAsmMethod(short.class , "defToshortImplicit" , Object.class); - public static final Method DEF_TO_P_CHAR_IMPLICIT = getAsmMethod(char.class , "defTocharImplicit" , Object.class); - public static final Method DEF_TO_P_INT_IMPLICIT = getAsmMethod(int.class , "defTointImplicit" , Object.class); - public static final Method DEF_TO_P_LONG_IMPLICIT = getAsmMethod(long.class , "defTolongImplicit" , Object.class); - public static final Method DEF_TO_P_FLOAT_IMPLICIT = getAsmMethod(float.class , "defTofloatImplicit" , Object.class); - public static final Method DEF_TO_P_DOUBLE_IMPLICIT = getAsmMethod(double.class , "defTodoubleImplicit" , Object.class); - public static final Method DEF_TO_P_BYTE_EXPLICIT = getAsmMethod(byte.class , "defTobyteExplicit" , Object.class); - public static final Method DEF_TO_P_SHORT_EXPLICIT = getAsmMethod(short.class , "defToshortExplicit" , Object.class); - public static final Method DEF_TO_P_CHAR_EXPLICIT = getAsmMethod(char.class , "defTocharExplicit" , Object.class); - public static final Method DEF_TO_P_INT_EXPLICIT = getAsmMethod(int.class , "defTointExplicit" , Object.class); - public static final Method DEF_TO_P_LONG_EXPLICIT = getAsmMethod(long.class , "defTolongExplicit" , Object.class); - public static final Method DEF_TO_P_FLOAT_EXPLICIT = getAsmMethod(float.class , "defTofloatExplicit" , Object.class); - public static final Method DEF_TO_P_DOUBLE_EXPLICIT = getAsmMethod(double.class , "defTodoubleExplicit" , Object.class); + public static final Method DEF_TO_P_BYTE_IMPLICIT = getAsmMethod(byte.class, "defTobyteImplicit", Object.class); + public static final Method DEF_TO_P_SHORT_IMPLICIT = getAsmMethod(short.class, "defToshortImplicit", Object.class); + public static final Method DEF_TO_P_CHAR_IMPLICIT = getAsmMethod(char.class, "defTocharImplicit", Object.class); + public static final Method DEF_TO_P_INT_IMPLICIT = getAsmMethod(int.class, "defTointImplicit", Object.class); + public static final Method DEF_TO_P_LONG_IMPLICIT = getAsmMethod(long.class, "defTolongImplicit", Object.class); + public static final Method DEF_TO_P_FLOAT_IMPLICIT = 
getAsmMethod(float.class, "defTofloatImplicit", Object.class); + public static final Method DEF_TO_P_DOUBLE_IMPLICIT = getAsmMethod(double.class, "defTodoubleImplicit", Object.class); + public static final Method DEF_TO_P_BYTE_EXPLICIT = getAsmMethod(byte.class, "defTobyteExplicit", Object.class); + public static final Method DEF_TO_P_SHORT_EXPLICIT = getAsmMethod(short.class, "defToshortExplicit", Object.class); + public static final Method DEF_TO_P_CHAR_EXPLICIT = getAsmMethod(char.class, "defTocharExplicit", Object.class); + public static final Method DEF_TO_P_INT_EXPLICIT = getAsmMethod(int.class, "defTointExplicit", Object.class); + public static final Method DEF_TO_P_LONG_EXPLICIT = getAsmMethod(long.class, "defTolongExplicit", Object.class); + public static final Method DEF_TO_P_FLOAT_EXPLICIT = getAsmMethod(float.class, "defTofloatExplicit", Object.class); + public static final Method DEF_TO_P_DOUBLE_EXPLICIT = getAsmMethod(double.class, "defTodoubleExplicit", Object.class); public static final Method DEF_TO_B_BOOLEAN = getAsmMethod(Boolean.class, "defToBoolean", Object.class); - public static final Method DEF_TO_B_BYTE_IMPLICIT = getAsmMethod(Byte.class , "defToByteImplicit" , Object.class); - public static final Method DEF_TO_B_SHORT_IMPLICIT = getAsmMethod(Short.class , "defToShortImplicit" , Object.class); - public static final Method DEF_TO_B_CHARACTER_IMPLICIT = getAsmMethod(Character.class , "defToCharacterImplicit" , Object.class); - public static final Method DEF_TO_B_INTEGER_IMPLICIT = getAsmMethod(Integer.class , "defToIntegerImplicit" , Object.class); - public static final Method DEF_TO_B_LONG_IMPLICIT = getAsmMethod(Long.class , "defToLongImplicit" , Object.class); - public static final Method DEF_TO_B_FLOAT_IMPLICIT = getAsmMethod(Float.class , "defToFloatImplicit" , Object.class); - public static final Method DEF_TO_B_DOUBLE_IMPLICIT = getAsmMethod(Double.class , "defToDoubleImplicit" , Object.class); - public static final Method DEF_TO_B_BYTE_EXPLICIT = getAsmMethod(Byte.class , "defToByteExplicit" , Object.class); - public static final Method DEF_TO_B_SHORT_EXPLICIT = getAsmMethod(Short.class , "defToShortExplicit" , Object.class); - public static final Method DEF_TO_B_CHARACTER_EXPLICIT = getAsmMethod(Character.class , "defToCharacterExplicit" , Object.class); - public static final Method DEF_TO_B_INTEGER_EXPLICIT = getAsmMethod(Integer.class , "defToIntegerExplicit" , Object.class); - public static final Method DEF_TO_B_LONG_EXPLICIT = getAsmMethod(Long.class , "defToLongExplicit" , Object.class); - public static final Method DEF_TO_B_FLOAT_EXPLICIT = getAsmMethod(Float.class , "defToFloatExplicit" , Object.class); - public static final Method DEF_TO_B_DOUBLE_EXPLICIT = getAsmMethod(Double.class , "defToDoubleExplicit" , Object.class); + public static final Method DEF_TO_B_BYTE_IMPLICIT = getAsmMethod(Byte.class, "defToByteImplicit", Object.class); + public static final Method DEF_TO_B_SHORT_IMPLICIT = getAsmMethod(Short.class, "defToShortImplicit", Object.class); + public static final Method DEF_TO_B_CHARACTER_IMPLICIT = getAsmMethod(Character.class, "defToCharacterImplicit", Object.class); + public static final Method DEF_TO_B_INTEGER_IMPLICIT = getAsmMethod(Integer.class, "defToIntegerImplicit", Object.class); + public static final Method DEF_TO_B_LONG_IMPLICIT = getAsmMethod(Long.class, "defToLongImplicit", Object.class); + public static final Method DEF_TO_B_FLOAT_IMPLICIT = getAsmMethod(Float.class, "defToFloatImplicit", Object.class); + public static final 
Method DEF_TO_B_DOUBLE_IMPLICIT = getAsmMethod(Double.class, "defToDoubleImplicit", Object.class); + public static final Method DEF_TO_B_BYTE_EXPLICIT = getAsmMethod(Byte.class, "defToByteExplicit", Object.class); + public static final Method DEF_TO_B_SHORT_EXPLICIT = getAsmMethod(Short.class, "defToShortExplicit", Object.class); + public static final Method DEF_TO_B_CHARACTER_EXPLICIT = getAsmMethod(Character.class, "defToCharacterExplicit", Object.class); + public static final Method DEF_TO_B_INTEGER_EXPLICIT = getAsmMethod(Integer.class, "defToIntegerExplicit", Object.class); + public static final Method DEF_TO_B_LONG_EXPLICIT = getAsmMethod(Long.class, "defToLongExplicit", Object.class); + public static final Method DEF_TO_B_FLOAT_EXPLICIT = getAsmMethod(Float.class, "defToFloatExplicit", Object.class); + public static final Method DEF_TO_B_DOUBLE_EXPLICIT = getAsmMethod(Double.class, "defToDoubleExplicit", Object.class); public static final Method DEF_TO_STRING_IMPLICIT = getAsmMethod(String.class, "defToStringImplicit", Object.class); public static final Method DEF_TO_STRING_EXPLICIT = getAsmMethod(String.class, "defToStringExplicit", Object.class); @@ -145,18 +161,43 @@ public final class WriterConstants { public static final Method DEF_TO_ZONEDDATETIME = getAsmMethod(ZonedDateTime.class, "defToZonedDateTime", Object.class); /** invokedynamic bootstrap for lambda expression/method references */ - public static final MethodType LAMBDA_BOOTSTRAP_TYPE = - MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, - MethodType.class, String.class, int.class, String.class, MethodType.class, int.class, int.class, Object[].class); - public static final Handle LAMBDA_BOOTSTRAP_HANDLE = - new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaBootstrap.class), - "lambdaBootstrap", LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); - public static final MethodType DELEGATE_BOOTSTRAP_TYPE = - MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, MethodHandle.class, - int.class, Object[].class); - public static final Handle DELEGATE_BOOTSTRAP_HANDLE = - new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaBootstrap.class), - "delegateBootstrap", DELEGATE_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); + public static final MethodType LAMBDA_BOOTSTRAP_TYPE = MethodType.methodType( + CallSite.class, + MethodHandles.Lookup.class, + String.class, + MethodType.class, + MethodType.class, + String.class, + int.class, + String.class, + MethodType.class, + int.class, + int.class, + Object[].class + ); + public static final Handle LAMBDA_BOOTSTRAP_HANDLE = new Handle( + Opcodes.H_INVOKESTATIC, + Type.getInternalName(LambdaBootstrap.class), + "lambdaBootstrap", + LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), + false + ); + public static final MethodType DELEGATE_BOOTSTRAP_TYPE = MethodType.methodType( + CallSite.class, + MethodHandles.Lookup.class, + String.class, + MethodType.class, + MethodHandle.class, + int.class, + Object[].class + ); + public static final Handle DELEGATE_BOOTSTRAP_HANDLE = new Handle( + Opcodes.H_INVOKESTATIC, + Type.getInternalName(LambdaBootstrap.class), + "delegateBootstrap", + DELEGATE_BOOTSTRAP_TYPE.toMethodDescriptorString(), + false + ); /** dynamic invokedynamic bootstrap for indy string concats (Java 9+) */ public static final Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; @@ -181,16 +222,16 @@ public final class WriterConstants { public static final Type 
STRING_TYPE = Type.getType(String.class); public static final Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); - public static final Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, CTOR_METHOD_NAME); + public static final Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, CTOR_METHOD_NAME); public static final Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); - public static final Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); - public static final Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); - public static final Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); - public static final Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); - public static final Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); - public static final Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); - public static final Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); - public static final Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); + public static final Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); + public static final Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); + public static final Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); + public static final Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); + public static final Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); + public static final Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); + public static final Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); + public static final Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); public static final Type OBJECTS_TYPE = Type.getType(Objects.class); public static final Method EQUALS = getAsmMethod(boolean.class, "equals", Object.class, Object.class); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java index 4bd44ef4e7c..4b2125aac24 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextAction.java @@ -167,7 +167,7 @@ public class PainlessContextAction extends ActionType)Request::new); + super(NAME, transportService, actionFilters, (Writeable.Reader) Request::new); this.painlessScriptEngine = painlessScriptEngine; } @@ -177,15 +177,18 @@ public class PainlessContextAction extends ActionType v.name).collect(Collectors.toList()); + scriptContextNames = painlessScriptEngine.getContextsToLookups() + .keySet() + .stream() + .map(v -> v.name) + .collect(Collectors.toList()); painlessContextInfo = null; } else { ScriptContext scriptContext = null; PainlessLookup painlessLookup = null; - for (Map.Entry, PainlessLookup> contextLookupEntry : - 
painlessScriptEngine.getContextsToLookups().entrySet()) { + for (Map.Entry, PainlessLookup> contextLookupEntry : painlessScriptEngine.getContextsToLookups() + .entrySet()) { if (contextLookupEntry.getKey().name.equals(request.getScriptContextName())) { scriptContext = contextLookupEntry.getKey(); painlessLookup = contextLookupEntry.getValue(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassBindingInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassBindingInfo.java index 13ac97d41e2..301c1ca3695 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassBindingInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassBindingInfo.java @@ -60,15 +60,8 @@ public class PainlessContextClassBindingInfo implements Writeable, ToXContentObj @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextClassBindingInfo.class.getCanonicalName(), - (v) -> - new PainlessContextClassBindingInfo( - (String)v[0], - (String)v[1], - (String)v[2], - (int)v[3], - (List)v[4] - ) + PainlessContextClassBindingInfo.class.getCanonicalName(), + (v) -> new PainlessContextClassBindingInfo((String) v[0], (String) v[1], (String) v[2], (int) v[3], (List) v[4]) ); static { @@ -87,11 +80,11 @@ public class PainlessContextClassBindingInfo implements Writeable, ToXContentObj public PainlessContextClassBindingInfo(PainlessClassBinding painlessClassBinding) { this( - painlessClassBinding.javaMethod.getDeclaringClass().getName(), - painlessClassBinding.javaMethod.getName(), - painlessClassBinding.returnType.getName(), - painlessClassBinding.javaConstructor.getParameterCount(), - painlessClassBinding.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) + painlessClassBinding.javaMethod.getDeclaringClass().getName(), + painlessClassBinding.javaMethod.getName(), + painlessClassBinding.returnType.getName(), + painlessClassBinding.javaConstructor.getParameterCount(), + painlessClassBinding.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) ); } @@ -146,11 +139,11 @@ public class PainlessContextClassBindingInfo implements Writeable, ToXContentObj if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextClassBindingInfo that = (PainlessContextClassBindingInfo) o; - return readOnly == that.readOnly && - Objects.equals(declaring, that.declaring) && - Objects.equals(name, that.name) && - Objects.equals(rtn, that.rtn) && - Objects.equals(parameters, that.parameters); + return readOnly == that.readOnly + && Objects.equals(declaring, that.declaring) + && Objects.equals(name, that.name) + && Objects.equals(rtn, that.rtn) + && Objects.equals(parameters, that.parameters); } @Override @@ -160,13 +153,21 @@ public class PainlessContextClassBindingInfo implements Writeable, ToXContentObj @Override public String toString() { - return "PainlessContextClassBindingInfo{" + - "declaring='" + declaring + '\'' + - ", name='" + name + '\'' + - ", rtn='" + rtn + '\'' + - ", readOnly=" + readOnly + - ", parameters=" + parameters + - '}'; + return "PainlessContextClassBindingInfo{" + + "declaring='" + + declaring + + '\'' + + ", name='" + + name + + '\'' + + ", rtn='" + + rtn + + '\'' + + ", readOnly=" + + readOnly + + ", parameters=" + + parameters + + '}'; } public String getDeclaring() { diff --git 
a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassInfo.java index ff840fdcde4..272b4d9fa16 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextClassInfo.java @@ -62,32 +62,38 @@ public class PainlessContextClassInfo implements Writeable, ToXContentObject { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextClassInfo.class.getCanonicalName(), - (v) -> - new PainlessContextClassInfo( - (String)v[0], - (boolean)v[1], - (List)v[2], - (List)v[3], - (List)v[4], - (List)v[5], - (List)v[6] - ) + PainlessContextClassInfo.class.getCanonicalName(), + (v) -> new PainlessContextClassInfo( + (String) v[0], + (boolean) v[1], + (List) v[2], + (List) v[3], + (List) v[4], + (List) v[5], + (List) v[6] + ) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IMPORTED); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextConstructorInfo.fromXContent(p), CONSTRUCTORS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextMethodInfo.fromXContent(p), STATIC_METHODS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextMethodInfo.fromXContent(p), METHODS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextFieldInfo.fromXContent(p), STATIC_FIELDS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextFieldInfo.fromXContent(p), FIELDS); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextConstructorInfo.fromXContent(p), + CONSTRUCTORS + ); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextMethodInfo.fromXContent(p), + STATIC_METHODS + ); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> PainlessContextMethodInfo.fromXContent(p), METHODS); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextFieldInfo.fromXContent(p), + STATIC_FIELDS + ); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> PainlessContextFieldInfo.fromXContent(p), FIELDS); } private final String name; @@ -100,20 +106,25 @@ public class PainlessContextClassInfo implements Writeable, ToXContentObject { public PainlessContextClassInfo(Class javaClass, boolean imported, PainlessClass painlessClass) { this( - javaClass.getName(), - imported, - painlessClass.constructors.values().stream().map(PainlessContextConstructorInfo::new).collect(Collectors.toList()), - painlessClass.staticMethods.values().stream().map(PainlessContextMethodInfo::new).collect(Collectors.toList()), - painlessClass.methods.values().stream().map(PainlessContextMethodInfo::new).collect(Collectors.toList()), - painlessClass.staticFields.values().stream().map(PainlessContextFieldInfo::new).collect(Collectors.toList()), - painlessClass.fields.values().stream().map(PainlessContextFieldInfo::new).collect(Collectors.toList()) + javaClass.getName(), + imported, + 
painlessClass.constructors.values().stream().map(PainlessContextConstructorInfo::new).collect(Collectors.toList()), + painlessClass.staticMethods.values().stream().map(PainlessContextMethodInfo::new).collect(Collectors.toList()), + painlessClass.methods.values().stream().map(PainlessContextMethodInfo::new).collect(Collectors.toList()), + painlessClass.staticFields.values().stream().map(PainlessContextFieldInfo::new).collect(Collectors.toList()), + painlessClass.fields.values().stream().map(PainlessContextFieldInfo::new).collect(Collectors.toList()) ); } - public PainlessContextClassInfo(String name, boolean imported, - List constructors, - List staticMethods, List methods, - List staticFields, List fields) { + public PainlessContextClassInfo( + String name, + boolean imported, + List constructors, + List staticMethods, + List methods, + List staticFields, + List fields + ) { this.name = Objects.requireNonNull(name); this.imported = imported; @@ -183,13 +194,13 @@ public class PainlessContextClassInfo implements Writeable, ToXContentObject { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextClassInfo that = (PainlessContextClassInfo) o; - return imported == that.imported && - Objects.equals(name, that.name) && - Objects.equals(constructors, that.constructors) && - Objects.equals(staticMethods, that.staticMethods) && - Objects.equals(methods, that.methods) && - Objects.equals(staticFields, that.staticFields) && - Objects.equals(fields, that.fields); + return imported == that.imported + && Objects.equals(name, that.name) + && Objects.equals(constructors, that.constructors) + && Objects.equals(staticMethods, that.staticMethods) + && Objects.equals(methods, that.methods) + && Objects.equals(staticFields, that.staticFields) + && Objects.equals(fields, that.fields); } @Override @@ -199,15 +210,23 @@ public class PainlessContextClassInfo implements Writeable, ToXContentObject { @Override public String toString() { - return "PainlessContextClassInfo{" + - "name='" + name + '\'' + - ", imported=" + imported + - ", constructors=" + constructors + - ", staticMethods=" + staticMethods + - ", methods=" + methods + - ", staticFields=" + staticFields + - ", fields=" + fields + - '}'; + return "PainlessContextClassInfo{" + + "name='" + + name + + '\'' + + ", imported=" + + imported + + ", constructors=" + + constructors + + ", staticMethods=" + + staticMethods + + ", methods=" + + methods + + ", staticFields=" + + staticFields + + ", fields=" + + fields + + '}'; } public String getName() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextConstructorInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextConstructorInfo.java index f724fb0ef09..9058be6d2c0 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextConstructorInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextConstructorInfo.java @@ -60,12 +60,8 @@ public class PainlessContextConstructorInfo implements Writeable, ToXContentObje @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextConstructorInfo.class.getCanonicalName(), - (v) -> - new PainlessContextConstructorInfo( - (String)v[0], - (List)v[1] - ) + PainlessContextConstructorInfo.class.getCanonicalName(), + (v) -> new PainlessContextConstructorInfo((String) v[0], (List) v[1]) ); static { @@ 
-74,9 +70,9 @@ public class PainlessContextConstructorInfo implements Writeable, ToXContentObje } public PainlessContextConstructorInfo(PainlessConstructor painlessConstructor) { - this ( - painlessConstructor.javaConstructor.getDeclaringClass().getName(), - painlessConstructor.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) + this( + painlessConstructor.javaConstructor.getDeclaringClass().getName(), + painlessConstructor.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) ); } @@ -119,8 +115,7 @@ public class PainlessContextConstructorInfo implements Writeable, ToXContentObje if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextConstructorInfo that = (PainlessContextConstructorInfo) o; - return Objects.equals(declaring, that.declaring) && - Objects.equals(parameters, that.parameters); + return Objects.equals(declaring, that.declaring) && Objects.equals(parameters, that.parameters); } @Override @@ -130,10 +125,7 @@ public class PainlessContextConstructorInfo implements Writeable, ToXContentObje @Override public String toString() { - return "PainlessContextConstructorInfo{" + - "declaring='" + declaring + '\'' + - ", parameters=" + parameters + - '}'; + return "PainlessContextConstructorInfo{" + "declaring='" + declaring + '\'' + ", parameters=" + parameters + '}'; } public String getDeclaring() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextFieldInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextFieldInfo.java index 267b0b929f4..1cf1f0937b4 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextFieldInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextFieldInfo.java @@ -53,13 +53,8 @@ public class PainlessContextFieldInfo implements Writeable, ToXContentObject { public static final ParseField TYPE = new ParseField("type"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextFieldInfo.class.getCanonicalName(), - (v) -> - new PainlessContextFieldInfo( - (String)v[0], - (String)v[1], - (String)v[2] - ) + PainlessContextFieldInfo.class.getCanonicalName(), + (v) -> new PainlessContextFieldInfo((String) v[0], (String) v[1], (String) v[2]) ); static { @@ -74,9 +69,9 @@ public class PainlessContextFieldInfo implements Writeable, ToXContentObject { public PainlessContextFieldInfo(PainlessField painlessField) { this( - painlessField.javaField.getDeclaringClass().getName(), - painlessField.javaField.getName(), - painlessField.typeParameter.getName() + painlessField.javaField.getDeclaringClass().getName(), + painlessField.javaField.getName(), + painlessField.typeParameter.getName() ); } @@ -123,9 +118,7 @@ public class PainlessContextFieldInfo implements Writeable, ToXContentObject { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextFieldInfo that = (PainlessContextFieldInfo) o; - return Objects.equals(declaring, that.declaring) && - Objects.equals(name, that.name) && - Objects.equals(type, that.type); + return Objects.equals(declaring, that.declaring) && Objects.equals(name, that.name) && Objects.equals(type, that.type); } @Override @@ -135,11 +128,7 @@ public class PainlessContextFieldInfo implements Writeable, ToXContentObject { @Override public String toString() { - return "PainlessContextFieldInfo{" + - "declaring='" + 
declaring + '\'' + - ", name='" + name + '\'' + - ", type='" + type + '\'' + - '}'; + return "PainlessContextFieldInfo{" + "declaring='" + declaring + '\'' + ", name='" + name + '\'' + ", type='" + type + '\'' + '}'; } public String getDeclaring() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInfo.java index f8afb426017..e82f2299c51 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInfo.java @@ -64,27 +64,34 @@ public class PainlessContextInfo implements Writeable, ToXContentObject { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextInfo.class.getCanonicalName(), - (v) -> - new PainlessContextInfo( - (String)v[0], - (List)v[1], - (List)v[2], - (List)v[3], - (List)v[4] - ) + PainlessContextInfo.class.getCanonicalName(), + (v) -> new PainlessContextInfo( + (String) v[0], + (List) v[1], + (List) v[2], + (List) v[3], + (List) v[4] + ) ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextClassInfo.fromXContent(p), CLASSES); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextMethodInfo.fromXContent(p), IMPORTED_METHODS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextClassBindingInfo.fromXContent(p), CLASS_BINDINGS); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), - (p, c) -> PainlessContextInstanceBindingInfo.fromXContent(p), INSTANCE_BINDINGS); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> PainlessContextClassInfo.fromXContent(p), CLASSES); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextMethodInfo.fromXContent(p), + IMPORTED_METHODS + ); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextClassBindingInfo.fromXContent(p), + CLASS_BINDINGS + ); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> PainlessContextInstanceBindingInfo.fromXContent(p), + INSTANCE_BINDINGS + ); } private final String name; @@ -95,43 +102,59 @@ public class PainlessContextInfo implements Writeable, ToXContentObject { public PainlessContextInfo(ScriptContext scriptContext, PainlessLookup painlessLookup) { this( - scriptContext.name, - painlessLookup.getClasses().stream().map( - javaClass -> new PainlessContextClassInfo( - javaClass, - javaClass == painlessLookup.canonicalTypeNameToType( - javaClass.getName().substring(javaClass.getName().lastIndexOf('.') + 1).replace('$', '.')), - painlessLookup.lookupPainlessClass(javaClass)) - ).collect(Collectors.toList()), - painlessLookup.getImportedPainlessMethodsKeys().stream().map(importedPainlessMethodKey -> { - String[] split = importedPainlessMethodKey.split("/"); - String importedPainlessMethodName = split[0]; - int importedPainlessMethodArity = Integer.parseInt(split[1]); - PainlessMethod importedPainlessMethod = - painlessLookup.lookupImportedPainlessMethod(importedPainlessMethodName, importedPainlessMethodArity); - return new PainlessContextMethodInfo(importedPainlessMethod); - 
}).collect(Collectors.toList()), - painlessLookup.getPainlessClassBindingsKeys().stream().map(painlessClassBindingKey -> { - String[] split = painlessClassBindingKey.split("/"); - String painlessClassBindingName = split[0]; - int painlessClassBindingArity = Integer.parseInt(split[1]); - PainlessClassBinding painlessClassBinding = - painlessLookup.lookupPainlessClassBinding(painlessClassBindingName, painlessClassBindingArity); - return new PainlessContextClassBindingInfo(painlessClassBinding); - }).collect(Collectors.toList()), - painlessLookup.getPainlessInstanceBindingsKeys().stream().map(painlessInstanceBindingKey -> { - String[] split = painlessInstanceBindingKey.split("/"); - String painlessInstanceBindingName = split[0]; - int painlessInstanceBindingArity = Integer.parseInt(split[1]); - PainlessInstanceBinding painlessInstanceBinding = - painlessLookup.lookupPainlessInstanceBinding(painlessInstanceBindingName, painlessInstanceBindingArity); - return new PainlessContextInstanceBindingInfo(painlessInstanceBinding); - }).collect(Collectors.toList()) + scriptContext.name, + painlessLookup.getClasses() + .stream() + .map( + javaClass -> new PainlessContextClassInfo( + javaClass, + javaClass == painlessLookup.canonicalTypeNameToType( + javaClass.getName().substring(javaClass.getName().lastIndexOf('.') + 1).replace('$', '.') + ), + painlessLookup.lookupPainlessClass(javaClass) + ) + ) + .collect(Collectors.toList()), + painlessLookup.getImportedPainlessMethodsKeys().stream().map(importedPainlessMethodKey -> { + String[] split = importedPainlessMethodKey.split("/"); + String importedPainlessMethodName = split[0]; + int importedPainlessMethodArity = Integer.parseInt(split[1]); + PainlessMethod importedPainlessMethod = painlessLookup.lookupImportedPainlessMethod( + importedPainlessMethodName, + importedPainlessMethodArity + ); + return new PainlessContextMethodInfo(importedPainlessMethod); + }).collect(Collectors.toList()), + painlessLookup.getPainlessClassBindingsKeys().stream().map(painlessClassBindingKey -> { + String[] split = painlessClassBindingKey.split("/"); + String painlessClassBindingName = split[0]; + int painlessClassBindingArity = Integer.parseInt(split[1]); + PainlessClassBinding painlessClassBinding = painlessLookup.lookupPainlessClassBinding( + painlessClassBindingName, + painlessClassBindingArity + ); + return new PainlessContextClassBindingInfo(painlessClassBinding); + }).collect(Collectors.toList()), + painlessLookup.getPainlessInstanceBindingsKeys().stream().map(painlessInstanceBindingKey -> { + String[] split = painlessInstanceBindingKey.split("/"); + String painlessInstanceBindingName = split[0]; + int painlessInstanceBindingArity = Integer.parseInt(split[1]); + PainlessInstanceBinding painlessInstanceBinding = painlessLookup.lookupPainlessInstanceBinding( + painlessInstanceBindingName, + painlessInstanceBindingArity + ); + return new PainlessContextInstanceBindingInfo(painlessInstanceBinding); + }).collect(Collectors.toList()) ); } - public PainlessContextInfo(String name, List classes, List importedMethods, - List classBindings, List instanceBindings) { + public PainlessContextInfo( + String name, + List classes, + List importedMethods, + List classBindings, + List instanceBindings + ) { this.name = Objects.requireNonNull(name); classes = new ArrayList<>(Objects.requireNonNull(classes)); classes.sort(Comparator.comparing(PainlessContextClassInfo::getSortValue)); @@ -186,11 +209,11 @@ public class PainlessContextInfo implements Writeable, ToXContentObject { if (this == 
o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextInfo that = (PainlessContextInfo) o; - return Objects.equals(name, that.name) && - Objects.equals(classes, that.classes) && - Objects.equals(importedMethods, that.importedMethods) && - Objects.equals(classBindings, that.classBindings) && - Objects.equals(instanceBindings, that.instanceBindings); + return Objects.equals(name, that.name) + && Objects.equals(classes, that.classes) + && Objects.equals(importedMethods, that.importedMethods) + && Objects.equals(classBindings, that.classBindings) + && Objects.equals(instanceBindings, that.instanceBindings); } @Override @@ -200,13 +223,19 @@ public class PainlessContextInfo implements Writeable, ToXContentObject { @Override public String toString() { - return "PainlessContextInfo{" + - "name='" + name + '\'' + - ", classes=" + classes + - ", importedMethods=" + importedMethods + - ", classBindings=" + classBindings + - ", instanceBindings=" + instanceBindings + - '}'; + return "PainlessContextInfo{" + + "name='" + + name + + '\'' + + ", classes=" + + classes + + ", importedMethods=" + + importedMethods + + ", classBindings=" + + classBindings + + ", instanceBindings=" + + instanceBindings + + '}'; } public String getName() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInstanceBindingInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInstanceBindingInfo.java index 14821dc58d2..74637a790da 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInstanceBindingInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextInstanceBindingInfo.java @@ -59,14 +59,8 @@ public class PainlessContextInstanceBindingInfo implements Writeable, ToXContent @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextInstanceBindingInfo.class.getCanonicalName(), - (v) -> - new PainlessContextInstanceBindingInfo( - (String)v[0], - (String)v[1], - (String)v[2], - (List)v[3] - ) + PainlessContextInstanceBindingInfo.class.getCanonicalName(), + (v) -> new PainlessContextInstanceBindingInfo((String) v[0], (String) v[1], (String) v[2], (List) v[3]) ); static { @@ -83,10 +77,10 @@ public class PainlessContextInstanceBindingInfo implements Writeable, ToXContent public PainlessContextInstanceBindingInfo(PainlessInstanceBinding painlessInstanceBinding) { this( - painlessInstanceBinding.javaMethod.getDeclaringClass().getName(), - painlessInstanceBinding.javaMethod.getName(), - painlessInstanceBinding.returnType.getName(), - painlessInstanceBinding.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) + painlessInstanceBinding.javaMethod.getDeclaringClass().getName(), + painlessInstanceBinding.javaMethod.getName(), + painlessInstanceBinding.returnType.getName(), + painlessInstanceBinding.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) ); } @@ -138,10 +132,10 @@ public class PainlessContextInstanceBindingInfo implements Writeable, ToXContent if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextInstanceBindingInfo that = (PainlessContextInstanceBindingInfo) o; - return Objects.equals(declaring, that.declaring) && - Objects.equals(name, that.name) && - Objects.equals(rtn, that.rtn) && - Objects.equals(parameters, that.parameters); + return 
Objects.equals(declaring, that.declaring) + && Objects.equals(name, that.name) + && Objects.equals(rtn, that.rtn) + && Objects.equals(parameters, that.parameters); } @Override @@ -151,12 +145,19 @@ public class PainlessContextInstanceBindingInfo implements Writeable, ToXContent @Override public String toString() { - return "PainlessContextInstanceBindingInfo{" + - "declaring='" + declaring + '\'' + - ", name='" + name + '\'' + - ", rtn='" + rtn + '\'' + - ", parameters=" + parameters + - '}'; + return "PainlessContextInstanceBindingInfo{" + + "declaring='" + + declaring + + '\'' + + ", name='" + + name + + '\'' + + ", rtn='" + + rtn + + '\'' + + ", parameters=" + + parameters + + '}'; } public String getDeclaring() { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextMethodInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextMethodInfo.java index bca1c2207cb..eaf751ea631 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextMethodInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessContextMethodInfo.java @@ -59,14 +59,8 @@ public class PainlessContextMethodInfo implements Writeable, ToXContentObject { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - PainlessContextMethodInfo.class.getCanonicalName(), - (v) -> - new PainlessContextMethodInfo( - (String)v[0], - (String)v[1], - (String)v[2], - (List)v[3] - ) + PainlessContextMethodInfo.class.getCanonicalName(), + (v) -> new PainlessContextMethodInfo((String) v[0], (String) v[1], (String) v[2], (List) v[3]) ); static { @@ -83,10 +77,10 @@ public class PainlessContextMethodInfo implements Writeable, ToXContentObject { public PainlessContextMethodInfo(PainlessMethod painlessMethod) { this( - painlessMethod.javaMethod.getDeclaringClass().getName(), - painlessMethod.javaMethod.getName(), - painlessMethod.returnType.getName(), - painlessMethod.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) + painlessMethod.javaMethod.getDeclaringClass().getName(), + painlessMethod.javaMethod.getName(), + painlessMethod.returnType.getName(), + painlessMethod.typeParameters.stream().map(Class::getName).collect(Collectors.toList()) ); } @@ -137,10 +131,10 @@ public class PainlessContextMethodInfo implements Writeable, ToXContentObject { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PainlessContextMethodInfo that = (PainlessContextMethodInfo) o; - return Objects.equals(declaring, that.declaring) && - Objects.equals(name, that.name) && - Objects.equals(rtn, that.rtn) && - Objects.equals(parameters, that.parameters); + return Objects.equals(declaring, that.declaring) + && Objects.equals(name, that.name) + && Objects.equals(rtn, that.rtn) + && Objects.equals(parameters, that.parameters); } @Override @@ -150,12 +144,19 @@ public class PainlessContextMethodInfo implements Writeable, ToXContentObject { @Override public String toString() { - return "PainlessContextMethodInfo{" + - "declaring='" + declaring + '\'' + - ", name='" + name + '\'' + - ", rtn='" + rtn + '\'' + - ", parameters=" + parameters + - '}'; + return "PainlessContextMethodInfo{" + + "declaring='" + + declaring + + '\'' + + ", name='" + + name + + '\'' + + ", rtn='" + + rtn + + '\'' + + ", parameters=" + + parameters + + '}'; } public String getDeclaring() { diff --git 
a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java index 60efa205b41..2c8796d49f3 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java @@ -122,7 +122,9 @@ public class PainlessExecuteAction extends ActionType PARSER = new ConstructingObjectParser<>( - "painless_execute_request", args -> new Request((Script) args[0], (String) args[1], (ContextSetup) args[2])); + "painless_execute_request", + args -> new Request((Script) args[0], (String) args[1], (ContextSetup) args[2]) + ); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> Script.parse(p), SCRIPT_FIELD); @@ -153,9 +155,10 @@ public class PainlessExecuteAction extends ActionType PARSER = - new ConstructingObjectParser<>("execute_script_context", - args -> new ContextSetup((String) args[0], (BytesReference) args[1], (QueryBuilder) args[2])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "execute_script_context", + args -> new ContextSetup((String) args[0], (BytesReference) args[1], (QueryBuilder) args[2]) + ); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_FIELD); @@ -165,8 +168,11 @@ public class PainlessExecuteAction extends ActionType - AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY_FIELD); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), + QUERY_FIELD + ); } private final String index; @@ -191,7 +197,7 @@ public class PainlessExecuteAction extends ActionType { FilterScript.Factory factory = scriptService.compile(request.script, FilterScript.CONTEXT); - FilterScript.LeafFactory leafFactory = - factory.newFactory(request.getScript().getParams(), context.lookup()); + FilterScript.LeafFactory leafFactory = factory.newFactory(request.getScript().getParams(), context.lookup()); FilterScript filterScript = leafFactory.newInstance(leafReaderContext); filterScript.setDocument(0); boolean result = filterScript.execute(); @@ -545,8 +570,7 @@ public class PainlessExecuteAction extends ActionType { ScoreScript.Factory factory = scriptService.compile(request.script, ScoreScript.CONTEXT); - ScoreScript.LeafFactory leafFactory = - factory.newFactory(request.getScript().getParams(), context.lookup()); + ScoreScript.LeafFactory leafFactory = factory.newFactory(request.getScript().getParams(), context.lookup()); ScoreScript scoreScript = leafFactory.newInstance(leafReaderContext); scoreScript.setDocument(0); @@ -570,9 +594,11 @@ public class PainlessExecuteAction extends ActionType handler, - IndexService indexService) throws IOException { + private static Response prepareRamIndex( + Request request, + CheckedBiFunction handler, + IndexService indexService + ) throws IOException { Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer(); @@ -589,8 +615,7 @@ public class PainlessExecuteAction extends ActionType absoluteStartMillis, null); + QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> absoluteStartMillis, null); return handler.apply(context, indexReader.leaves().get(0)); } } @@ -602,9 +627,7 @@ public class PainlessExecuteAction extends ActionType routes() { - return unmodifiableList(asList( - 
new Route(GET, "/_scripts/painless/_execute"), - new Route(POST, "/_scripts/painless/_execute"))); + return unmodifiableList(asList(new Route(GET, "/_scripts/painless/_execute"), new Route(POST, "/_scripts/painless/_execute"))); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/EnhancedPainlessLexer.java index 5c5dce34899..805b8a8a45b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/EnhancedPainlessLexer.java @@ -76,8 +76,11 @@ final class EnhancedPainlessLexer extends PainlessLexer { if ((firstChar == '\'' || firstChar == '"') && text.length() - 2 > 0 && text.charAt(text.length() - 2) == '\\') { /* Use a simple heuristic to guess if the unrecognized characters were trying to be a string but has a broken escape sequence. * If it was add an extra message about valid string escape sequences. */ - message += " The only valid escape sequences in strings starting with [" + firstChar + "] are [\\\\] and [\\" - + firstChar + "]."; + message += " The only valid escape sequences in strings starting with [" + + firstChar + + "] are [\\\\] and [\\" + + firstChar + + "]."; } throw location.createError(new IllegalArgumentException(message, lnvae)); } @@ -89,18 +92,18 @@ final class EnhancedPainlessLexer extends PainlessLexer { return true; } switch (lastToken.getType()) { - case PainlessLexer.RBRACE: - case PainlessLexer.RP: - case PainlessLexer.OCTAL: - case PainlessLexer.HEX: - case PainlessLexer.INTEGER: - case PainlessLexer.DECIMAL: - case PainlessLexer.ID: - case PainlessLexer.DOTINTEGER: - case PainlessLexer.DOTID: - return false; - default: - return true; + case PainlessLexer.RBRACE: + case PainlessLexer.RP: + case PainlessLexer.OCTAL: + case PainlessLexer.HEX: + case PainlessLexer.INTEGER: + case PainlessLexer.DECIMAL: + case PainlessLexer.ID: + case PainlessLexer.DOTINTEGER: + case PainlessLexer.DOTID: + return false; + default: + return true; } } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java index a4e7e3a1915..82b8bc939e9 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java @@ -32,387 +32,611 @@ */ package org.opensearch.painless.antlr; + import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.*; -@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) +@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast" }) abstract class PainlessLexer extends Lexer { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } - - protected static final DFA[] _decisionToDFA; - protected static final PredictionContextCache _sharedContextCache = - new PredictionContextCache(); - public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, - FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, 
TRY=23, CATCH=24, THROW=25, - THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, - ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, - EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, - COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, - DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, - AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, - DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, PRIMITIVE=81, - DEF=82, ID=83, DOTINTEGER=84, DOTID=85; - public static final int AFTER_DOT = 1; - public static String[] modeNames = { - "DEFAULT_MODE", "AFTER_DOT" - }; - - public static final String[] ruleNames = { - "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", - "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", - "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", - "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", - "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", "REF", "ARROW", - "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", - "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", - "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "PRIMITIVE", - "DEF", "ID", "DOTINTEGER", "DOTID" - }; - - private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", - "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", - "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", - "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", - "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", - "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", - "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, - null, null, null, null, null, "'true'", "'false'", "'null'", null, "'def'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", - "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", - "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", - "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", - "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", - "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", - "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", - "NULL", "PRIMITIVE", "DEF", "ID", "DOTINTEGER", "DOTID" - }; - public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); - - /** - * @deprecated Use {@link #VOCABULARY} instead. 
- */ - @Deprecated - public static final String[] tokenNames; - static { - tokenNames = new String[_SYMBOLIC_NAMES.length]; - for (int i = 0; i < tokenNames.length; i++) { - tokenNames[i] = VOCABULARY.getLiteralName(i); - if (tokenNames[i] == null) { - tokenNames[i] = VOCABULARY.getSymbolicName(i); - } - - if (tokenNames[i] == null) { - tokenNames[i] = ""; - } + static { + RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } - } - @Override - @Deprecated - public String[] getTokenNames() { - return tokenNames; - } + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); + public static final int WS = 1, COMMENT = 2, LBRACK = 3, RBRACK = 4, LBRACE = 5, RBRACE = 6, LP = 7, RP = 8, DOT = 9, NSDOT = 10, + COMMA = 11, SEMICOLON = 12, IF = 13, IN = 14, ELSE = 15, WHILE = 16, DO = 17, FOR = 18, CONTINUE = 19, BREAK = 20, RETURN = 21, + NEW = 22, TRY = 23, CATCH = 24, THROW = 25, THIS = 26, INSTANCEOF = 27, BOOLNOT = 28, BWNOT = 29, MUL = 30, DIV = 31, REM = 32, + ADD = 33, SUB = 34, LSH = 35, RSH = 36, USH = 37, LT = 38, LTE = 39, GT = 40, GTE = 41, EQ = 42, EQR = 43, NE = 44, NER = 45, + BWAND = 46, XOR = 47, BWOR = 48, BOOLAND = 49, BOOLOR = 50, COND = 51, COLON = 52, ELVIS = 53, REF = 54, ARROW = 55, FIND = 56, + MATCH = 57, INCR = 58, DECR = 59, ASSIGN = 60, AADD = 61, ASUB = 62, AMUL = 63, ADIV = 64, AREM = 65, AAND = 66, AXOR = 67, AOR = + 68, ALSH = 69, ARSH = 70, AUSH = 71, OCTAL = 72, HEX = 73, INTEGER = 74, DECIMAL = 75, STRING = 76, REGEX = 77, TRUE = 78, + FALSE = 79, NULL = 80, PRIMITIVE = 81, DEF = 82, ID = 83, DOTINTEGER = 84, DOTID = 85; + public static final int AFTER_DOT = 1; + public static String[] modeNames = { "DEFAULT_MODE", "AFTER_DOT" }; - @Override + public static final String[] ruleNames = { + "WS", + "COMMENT", + "LBRACK", + "RBRACK", + "LBRACE", + "RBRACE", + "LP", + "RP", + "DOT", + "NSDOT", + "COMMA", + "SEMICOLON", + "IF", + "IN", + "ELSE", + "WHILE", + "DO", + "FOR", + "CONTINUE", + "BREAK", + "RETURN", + "NEW", + "TRY", + "CATCH", + "THROW", + "THIS", + "INSTANCEOF", + "BOOLNOT", + "BWNOT", + "MUL", + "DIV", + "REM", + "ADD", + "SUB", + "LSH", + "RSH", + "USH", + "LT", + "LTE", + "GT", + "GTE", + "EQ", + "EQR", + "NE", + "NER", + "BWAND", + "XOR", + "BWOR", + "BOOLAND", + "BOOLOR", + "COND", + "COLON", + "ELVIS", + "REF", + "ARROW", + "FIND", + "MATCH", + "INCR", + "DECR", + "ASSIGN", + "AADD", + "ASUB", + "AMUL", + "ADIV", + "AREM", + "AAND", + "AXOR", + "AOR", + "ALSH", + "ARSH", + "AUSH", + "OCTAL", + "HEX", + "INTEGER", + "DECIMAL", + "STRING", + "REGEX", + "TRUE", + "FALSE", + "NULL", + "PRIMITIVE", + "DEF", + "ID", + "DOTINTEGER", + "DOTID" }; - public Vocabulary getVocabulary() { - return VOCABULARY; - } + private static final String[] _LITERAL_NAMES = { + null, + null, + null, + "'{'", + "'}'", + "'['", + "']'", + "'('", + "')'", + "'.'", + "'?.'", + "','", + "';'", + "'if'", + "'in'", + "'else'", + "'while'", + "'do'", + "'for'", + "'continue'", + "'break'", + "'return'", + "'new'", + "'try'", + "'catch'", + "'throw'", + "'this'", + "'instanceof'", + "'!'", + "'~'", + "'*'", + "'/'", + "'%'", + "'+'", + "'-'", + "'<<'", + "'>>'", + "'>>>'", + "'<'", + "'<='", + "'>'", + "'>='", + "'=='", + "'==='", + "'!='", + "'!=='", + "'&'", + "'^'", + "'|'", + "'&&'", + "'||'", + "'?'", + "':'", + "'?:'", + "'::'", + "'->'", + "'=~'", + "'==~'", + "'++'", + "'--'", + "'='", + "'+='", + "'-='", + "'*='", + "'/='", + "'%='", + "'&='", + "'^='", + "'|='", + "'<<='", + 
"'>>='", + "'>>>='", + null, + null, + null, + null, + null, + null, + "'true'", + "'false'", + "'null'", + null, + "'def'" }; + private static final String[] _SYMBOLIC_NAMES = { + null, + "WS", + "COMMENT", + "LBRACK", + "RBRACK", + "LBRACE", + "RBRACE", + "LP", + "RP", + "DOT", + "NSDOT", + "COMMA", + "SEMICOLON", + "IF", + "IN", + "ELSE", + "WHILE", + "DO", + "FOR", + "CONTINUE", + "BREAK", + "RETURN", + "NEW", + "TRY", + "CATCH", + "THROW", + "THIS", + "INSTANCEOF", + "BOOLNOT", + "BWNOT", + "MUL", + "DIV", + "REM", + "ADD", + "SUB", + "LSH", + "RSH", + "USH", + "LT", + "LTE", + "GT", + "GTE", + "EQ", + "EQR", + "NE", + "NER", + "BWAND", + "XOR", + "BWOR", + "BOOLAND", + "BOOLOR", + "COND", + "COLON", + "ELVIS", + "REF", + "ARROW", + "FIND", + "MATCH", + "INCR", + "DECR", + "ASSIGN", + "AADD", + "ASUB", + "AMUL", + "ADIV", + "AREM", + "AAND", + "AXOR", + "AOR", + "ALSH", + "ARSH", + "AUSH", + "OCTAL", + "HEX", + "INTEGER", + "DECIMAL", + "STRING", + "REGEX", + "TRUE", + "FALSE", + "NULL", + "PRIMITIVE", + "DEF", + "ID", + "DOTINTEGER", + "DOTID" }; + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } - /** Is the preceding {@code /} a the beginning of a regex (true) or a division (false). */ - protected abstract boolean isSlashRegex(); - - - public PainlessLexer(CharStream input) { - super(input); - _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); - } - - @Override - public String getGrammarFileName() { return "PainlessLexer.g4"; } - - @Override - public String[] getRuleNames() { return ruleNames; } - - @Override - public String getSerializedATN() { return _serializedATN; } - - @Override - public String[] getModeNames() { return modeNames; } - - @Override - public ATN getATN() { return _ATN; } - - @Override - public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { - switch (ruleIndex) { - case 30: - return DIV_sempred((RuleContext)_localctx, predIndex); - case 76: - return REGEX_sempred((RuleContext)_localctx, predIndex); + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } } - return true; - } - private boolean DIV_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 0: - return isSlashRegex() == false ; - } - return true; - } - private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 1: - return isSlashRegex() ; - } - return true; - } - public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2W\u027a\b\1\b\1\4"+ - "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ - "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ - "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ - "\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+ - " \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+ - "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ - "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ - "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ - 
"I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\t"+ - "T\4U\tU\4V\tV\3\2\6\2\u00b0\n\2\r\2\16\2\u00b1\3\2\3\2\3\3\3\3\3\3\3\3"+ - "\7\3\u00ba\n\3\f\3\16\3\u00bd\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00c4\n\3\f"+ - "\3\16\3\u00c7\13\3\3\3\3\3\5\3\u00cb\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3"+ - "\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3"+ - "\f\3\f\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20"+ - "\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\24"+ - "\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25"+ - "\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30"+ - "\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33"+ - "\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ - "\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3\"\3\"\3#\3#\3$\3"+ - "$\3$\3%\3%\3%\3&\3&\3&\3&\3\'\3\'\3(\3(\3(\3)\3)\3*\3*\3*\3+\3+\3+\3,"+ - "\3,\3,\3,\3-\3-\3-\3.\3.\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62"+ - "\3\63\3\63\3\63\3\64\3\64\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\3"+ - "8\38\39\39\39\3:\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3>\3>\3>\3?\3?\3?\3"+ - "@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3C\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3G\3"+ - "G\3G\3G\3H\3H\3H\3H\3H\3I\3I\6I\u01ba\nI\rI\16I\u01bb\3I\5I\u01bf\nI\3"+ - "J\3J\3J\6J\u01c4\nJ\rJ\16J\u01c5\3J\5J\u01c9\nJ\3K\3K\3K\7K\u01ce\nK\f"+ - "K\16K\u01d1\13K\5K\u01d3\nK\3K\5K\u01d6\nK\3L\3L\3L\7L\u01db\nL\fL\16"+ - "L\u01de\13L\5L\u01e0\nL\3L\3L\6L\u01e4\nL\rL\16L\u01e5\5L\u01e8\nL\3L"+ - "\3L\5L\u01ec\nL\3L\6L\u01ef\nL\rL\16L\u01f0\5L\u01f3\nL\3L\5L\u01f6\n"+ - "L\3M\3M\3M\3M\3M\3M\7M\u01fe\nM\fM\16M\u0201\13M\3M\3M\3M\3M\3M\3M\3M"+ - "\7M\u020a\nM\fM\16M\u020d\13M\3M\5M\u0210\nM\3N\3N\3N\3N\6N\u0216\nN\r"+ - "N\16N\u0217\3N\3N\7N\u021c\nN\fN\16N\u021f\13N\3N\3N\3O\3O\3O\3O\3O\3"+ - "P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3"+ - "R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3"+ - "R\3R\3R\5R\u0259\nR\3S\3S\3S\3S\3T\3T\7T\u0261\nT\fT\16T\u0264\13T\3U"+ - "\3U\3U\7U\u0269\nU\fU\16U\u026c\13U\5U\u026e\nU\3U\3U\3V\3V\7V\u0274\n"+ - "V\fV\16V\u0277\13V\3V\3V\7\u00bb\u00c5\u01ff\u020b\u0217\2W\4\3\6\4\b"+ - "\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21\"\22$"+ - "\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!B\"D#F"+ - "$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I"+ - "\u0092J\u0094K\u0096L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\u00a4"+ - "S\u00a6T\u00a8U\u00aaV\u00acW\4\2\3\25\5\2\13\f\17\17\"\"\4\2\f\f\17\17"+ - "\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4"+ - "\2GGgg\4\2--//\6\2FFHHffhh\4\2$$^^\4\2))^^\3\2\f\f\4\2\f\f\61\61\t\2W"+ - "Weekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u02a0\2\4\3\2\2\2\2\6\3\2\2\2"+ - "\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3"+ - "\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2"+ - "\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2"+ - "\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3\2"+ - "\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2"+ - "\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2"+ - "N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3"+ - "\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2"+ - 
"\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2"+ - "\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080"+ - "\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2"+ - "\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092"+ - "\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098\3\2\2\2\2\u009a\3\2\2"+ - "\2\2\u009c\3\2\2\2\2\u009e\3\2\2\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2\2\2\u00a4"+ - "\3\2\2\2\2\u00a6\3\2\2\2\2\u00a8\3\2\2\2\3\u00aa\3\2\2\2\3\u00ac\3\2\2"+ - "\2\4\u00af\3\2\2\2\6\u00ca\3\2\2\2\b\u00ce\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2"+ - "\3\2\2\2\16\u00d4\3\2\2\2\20\u00d6\3\2\2\2\22\u00d8\3\2\2\2\24\u00da\3"+ - "\2\2\2\26\u00de\3\2\2\2\30\u00e3\3\2\2\2\32\u00e5\3\2\2\2\34\u00e7\3\2"+ - "\2\2\36\u00ea\3\2\2\2 \u00ed\3\2\2\2\"\u00f2\3\2\2\2$\u00f8\3\2\2\2&\u00fb"+ - "\3\2\2\2(\u00ff\3\2\2\2*\u0108\3\2\2\2,\u010e\3\2\2\2.\u0115\3\2\2\2\60"+ - "\u0119\3\2\2\2\62\u011d\3\2\2\2\64\u0123\3\2\2\2\66\u0129\3\2\2\28\u012e"+ - "\3\2\2\2:\u0139\3\2\2\2<\u013b\3\2\2\2>\u013d\3\2\2\2@\u013f\3\2\2\2B"+ - "\u0142\3\2\2\2D\u0144\3\2\2\2F\u0146\3\2\2\2H\u0148\3\2\2\2J\u014b\3\2"+ - "\2\2L\u014e\3\2\2\2N\u0152\3\2\2\2P\u0154\3\2\2\2R\u0157\3\2\2\2T\u0159"+ - "\3\2\2\2V\u015c\3\2\2\2X\u015f\3\2\2\2Z\u0163\3\2\2\2\\\u0166\3\2\2\2"+ - "^\u016a\3\2\2\2`\u016c\3\2\2\2b\u016e\3\2\2\2d\u0170\3\2\2\2f\u0173\3"+ - "\2\2\2h\u0176\3\2\2\2j\u0178\3\2\2\2l\u017a\3\2\2\2n\u017d\3\2\2\2p\u0180"+ - "\3\2\2\2r\u0183\3\2\2\2t\u0186\3\2\2\2v\u018a\3\2\2\2x\u018d\3\2\2\2z"+ - "\u0190\3\2\2\2|\u0192\3\2\2\2~\u0195\3\2\2\2\u0080\u0198\3\2\2\2\u0082"+ - "\u019b\3\2\2\2\u0084\u019e\3\2\2\2\u0086\u01a1\3\2\2\2\u0088\u01a4\3\2"+ - "\2\2\u008a\u01a7\3\2\2\2\u008c\u01aa\3\2\2\2\u008e\u01ae\3\2\2\2\u0090"+ - "\u01b2\3\2\2\2\u0092\u01b7\3\2\2\2\u0094\u01c0\3\2\2\2\u0096\u01d2\3\2"+ - "\2\2\u0098\u01df\3\2\2\2\u009a\u020f\3\2\2\2\u009c\u0211\3\2\2\2\u009e"+ - "\u0222\3\2\2\2\u00a0\u0227\3\2\2\2\u00a2\u022d\3\2\2\2\u00a4\u0258\3\2"+ - "\2\2\u00a6\u025a\3\2\2\2\u00a8\u025e\3\2\2\2\u00aa\u026d\3\2\2\2\u00ac"+ - "\u0271\3\2\2\2\u00ae\u00b0\t\2\2\2\u00af\u00ae\3\2\2\2\u00b0\u00b1\3\2"+ - "\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3"+ - "\u00b4\b\2\2\2\u00b4\5\3\2\2\2\u00b5\u00b6\7\61\2\2\u00b6\u00b7\7\61\2"+ - "\2\u00b7\u00bb\3\2\2\2\u00b8\u00ba\13\2\2\2\u00b9\u00b8\3\2\2\2\u00ba"+ - "\u00bd\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bb\u00b9\3\2\2\2\u00bc\u00be\3\2"+ - "\2\2\u00bd\u00bb\3\2\2\2\u00be\u00cb\t\3\2\2\u00bf\u00c0\7\61\2\2\u00c0"+ - "\u00c1\7,\2\2\u00c1\u00c5\3\2\2\2\u00c2\u00c4\13\2\2\2\u00c3\u00c2\3\2"+ - "\2\2\u00c4\u00c7\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c6"+ - "\u00c8\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7,\2\2\u00c9\u00cb\7\61"+ - "\2\2\u00ca\u00b5\3\2\2\2\u00ca\u00bf\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc"+ - "\u00cd\b\3\2\2\u00cd\7\3\2\2\2\u00ce\u00cf\7}\2\2\u00cf\t\3\2\2\2\u00d0"+ - "\u00d1\7\177\2\2\u00d1\13\3\2\2\2\u00d2\u00d3\7]\2\2\u00d3\r\3\2\2\2\u00d4"+ - "\u00d5\7_\2\2\u00d5\17\3\2\2\2\u00d6\u00d7\7*\2\2\u00d7\21\3\2\2\2\u00d8"+ - "\u00d9\7+\2\2\u00d9\23\3\2\2\2\u00da\u00db\7\60\2\2\u00db\u00dc\3\2\2"+ - "\2\u00dc\u00dd\b\n\3\2\u00dd\25\3\2\2\2\u00de\u00df\7A\2\2\u00df\u00e0"+ - "\7\60\2\2\u00e0\u00e1\3\2\2\2\u00e1\u00e2\b\13\3\2\u00e2\27\3\2\2\2\u00e3"+ - "\u00e4\7.\2\2\u00e4\31\3\2\2\2\u00e5\u00e6\7=\2\2\u00e6\33\3\2\2\2\u00e7"+ - "\u00e8\7k\2\2\u00e8\u00e9\7h\2\2\u00e9\35\3\2\2\2\u00ea\u00eb\7k\2\2\u00eb"+ - "\u00ec\7p\2\2\u00ec\37\3\2\2\2\u00ed\u00ee\7g\2\2\u00ee\u00ef\7n\2\2\u00ef"+ - 
"\u00f0\7u\2\2\u00f0\u00f1\7g\2\2\u00f1!\3\2\2\2\u00f2\u00f3\7y\2\2\u00f3"+ - "\u00f4\7j\2\2\u00f4\u00f5\7k\2\2\u00f5\u00f6\7n\2\2\u00f6\u00f7\7g\2\2"+ - "\u00f7#\3\2\2\2\u00f8\u00f9\7f\2\2\u00f9\u00fa\7q\2\2\u00fa%\3\2\2\2\u00fb"+ - "\u00fc\7h\2\2\u00fc\u00fd\7q\2\2\u00fd\u00fe\7t\2\2\u00fe\'\3\2\2\2\u00ff"+ - "\u0100\7e\2\2\u0100\u0101\7q\2\2\u0101\u0102\7p\2\2\u0102\u0103\7v\2\2"+ - "\u0103\u0104\7k\2\2\u0104\u0105\7p\2\2\u0105\u0106\7w\2\2\u0106\u0107"+ - "\7g\2\2\u0107)\3\2\2\2\u0108\u0109\7d\2\2\u0109\u010a\7t\2\2\u010a\u010b"+ - "\7g\2\2\u010b\u010c\7c\2\2\u010c\u010d\7m\2\2\u010d+\3\2\2\2\u010e\u010f"+ - "\7t\2\2\u010f\u0110\7g\2\2\u0110\u0111\7v\2\2\u0111\u0112\7w\2\2\u0112"+ - "\u0113\7t\2\2\u0113\u0114\7p\2\2\u0114-\3\2\2\2\u0115\u0116\7p\2\2\u0116"+ - "\u0117\7g\2\2\u0117\u0118\7y\2\2\u0118/\3\2\2\2\u0119\u011a\7v\2\2\u011a"+ - "\u011b\7t\2\2\u011b\u011c\7{\2\2\u011c\61\3\2\2\2\u011d\u011e\7e\2\2\u011e"+ - "\u011f\7c\2\2\u011f\u0120\7v\2\2\u0120\u0121\7e\2\2\u0121\u0122\7j\2\2"+ - "\u0122\63\3\2\2\2\u0123\u0124\7v\2\2\u0124\u0125\7j\2\2\u0125\u0126\7"+ - "t\2\2\u0126\u0127\7q\2\2\u0127\u0128\7y\2\2\u0128\65\3\2\2\2\u0129\u012a"+ - "\7v\2\2\u012a\u012b\7j\2\2\u012b\u012c\7k\2\2\u012c\u012d\7u\2\2\u012d"+ - "\67\3\2\2\2\u012e\u012f\7k\2\2\u012f\u0130\7p\2\2\u0130\u0131\7u\2\2\u0131"+ - "\u0132\7v\2\2\u0132\u0133\7c\2\2\u0133\u0134\7p\2\2\u0134\u0135\7e\2\2"+ - "\u0135\u0136\7g\2\2\u0136\u0137\7q\2\2\u0137\u0138\7h\2\2\u01389\3\2\2"+ - "\2\u0139\u013a\7#\2\2\u013a;\3\2\2\2\u013b\u013c\7\u0080\2\2\u013c=\3"+ - "\2\2\2\u013d\u013e\7,\2\2\u013e?\3\2\2\2\u013f\u0140\7\61\2\2\u0140\u0141"+ - "\6 \2\2\u0141A\3\2\2\2\u0142\u0143\7\'\2\2\u0143C\3\2\2\2\u0144\u0145"+ - "\7-\2\2\u0145E\3\2\2\2\u0146\u0147\7/\2\2\u0147G\3\2\2\2\u0148\u0149\7"+ - ">\2\2\u0149\u014a\7>\2\2\u014aI\3\2\2\2\u014b\u014c\7@\2\2\u014c\u014d"+ - "\7@\2\2\u014dK\3\2\2\2\u014e\u014f\7@\2\2\u014f\u0150\7@\2\2\u0150\u0151"+ - "\7@\2\2\u0151M\3\2\2\2\u0152\u0153\7>\2\2\u0153O\3\2\2\2\u0154\u0155\7"+ - ">\2\2\u0155\u0156\7?\2\2\u0156Q\3\2\2\2\u0157\u0158\7@\2\2\u0158S\3\2"+ - "\2\2\u0159\u015a\7@\2\2\u015a\u015b\7?\2\2\u015bU\3\2\2\2\u015c\u015d"+ - "\7?\2\2\u015d\u015e\7?\2\2\u015eW\3\2\2\2\u015f\u0160\7?\2\2\u0160\u0161"+ - "\7?\2\2\u0161\u0162\7?\2\2\u0162Y\3\2\2\2\u0163\u0164\7#\2\2\u0164\u0165"+ - "\7?\2\2\u0165[\3\2\2\2\u0166\u0167\7#\2\2\u0167\u0168\7?\2\2\u0168\u0169"+ - "\7?\2\2\u0169]\3\2\2\2\u016a\u016b\7(\2\2\u016b_\3\2\2\2\u016c\u016d\7"+ - "`\2\2\u016da\3\2\2\2\u016e\u016f\7~\2\2\u016fc\3\2\2\2\u0170\u0171\7("+ - "\2\2\u0171\u0172\7(\2\2\u0172e\3\2\2\2\u0173\u0174\7~\2\2\u0174\u0175"+ - "\7~\2\2\u0175g\3\2\2\2\u0176\u0177\7A\2\2\u0177i\3\2\2\2\u0178\u0179\7"+ - "<\2\2\u0179k\3\2\2\2\u017a\u017b\7A\2\2\u017b\u017c\7<\2\2\u017cm\3\2"+ - "\2\2\u017d\u017e\7<\2\2\u017e\u017f\7<\2\2\u017fo\3\2\2\2\u0180\u0181"+ - "\7/\2\2\u0181\u0182\7@\2\2\u0182q\3\2\2\2\u0183\u0184\7?\2\2\u0184\u0185"+ - "\7\u0080\2\2\u0185s\3\2\2\2\u0186\u0187\7?\2\2\u0187\u0188\7?\2\2\u0188"+ - "\u0189\7\u0080\2\2\u0189u\3\2\2\2\u018a\u018b\7-\2\2\u018b\u018c\7-\2"+ - "\2\u018cw\3\2\2\2\u018d\u018e\7/\2\2\u018e\u018f\7/\2\2\u018fy\3\2\2\2"+ - "\u0190\u0191\7?\2\2\u0191{\3\2\2\2\u0192\u0193\7-\2\2\u0193\u0194\7?\2"+ - "\2\u0194}\3\2\2\2\u0195\u0196\7/\2\2\u0196\u0197\7?\2\2\u0197\177\3\2"+ - "\2\2\u0198\u0199\7,\2\2\u0199\u019a\7?\2\2\u019a\u0081\3\2\2\2\u019b\u019c"+ - "\7\61\2\2\u019c\u019d\7?\2\2\u019d\u0083\3\2\2\2\u019e\u019f\7\'\2\2\u019f"+ - "\u01a0\7?\2\2\u01a0\u0085\3\2\2\2\u01a1\u01a2\7(\2\2\u01a2\u01a3\7?\2"+ - 
"\2\u01a3\u0087\3\2\2\2\u01a4\u01a5\7`\2\2\u01a5\u01a6\7?\2\2\u01a6\u0089"+ - "\3\2\2\2\u01a7\u01a8\7~\2\2\u01a8\u01a9\7?\2\2\u01a9\u008b\3\2\2\2\u01aa"+ - "\u01ab\7>\2\2\u01ab\u01ac\7>\2\2\u01ac\u01ad\7?\2\2\u01ad\u008d\3\2\2"+ - "\2\u01ae\u01af\7@\2\2\u01af\u01b0\7@\2\2\u01b0\u01b1\7?\2\2\u01b1\u008f"+ - "\3\2\2\2\u01b2\u01b3\7@\2\2\u01b3\u01b4\7@\2\2\u01b4\u01b5\7@\2\2\u01b5"+ - "\u01b6\7?\2\2\u01b6\u0091\3\2\2\2\u01b7\u01b9\7\62\2\2\u01b8\u01ba\t\4"+ - "\2\2\u01b9\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bb"+ - "\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd\u01bf\t\5\2\2\u01be\u01bd\3\2"+ - "\2\2\u01be\u01bf\3\2\2\2\u01bf\u0093\3\2\2\2\u01c0\u01c1\7\62\2\2\u01c1"+ - "\u01c3\t\6\2\2\u01c2\u01c4\t\7\2\2\u01c3\u01c2\3\2\2\2\u01c4\u01c5\3\2"+ - "\2\2\u01c5\u01c3\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c8\3\2\2\2\u01c7"+ - "\u01c9\t\5\2\2\u01c8\u01c7\3\2\2\2\u01c8\u01c9\3\2\2\2\u01c9\u0095\3\2"+ - "\2\2\u01ca\u01d3\7\62\2\2\u01cb\u01cf\t\b\2\2\u01cc\u01ce\t\t\2\2\u01cd"+ - "\u01cc\3\2\2\2\u01ce\u01d1\3\2\2\2\u01cf\u01cd\3\2\2\2\u01cf\u01d0\3\2"+ - "\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d2\u01ca\3\2\2\2\u01d2"+ - "\u01cb\3\2\2\2\u01d3\u01d5\3\2\2\2\u01d4\u01d6\t\n\2\2\u01d5\u01d4\3\2"+ - "\2\2\u01d5\u01d6\3\2\2\2\u01d6\u0097\3\2\2\2\u01d7\u01e0\7\62\2\2\u01d8"+ - "\u01dc\t\b\2\2\u01d9\u01db\t\t\2\2\u01da\u01d9\3\2\2\2\u01db\u01de\3\2"+ - "\2\2\u01dc\u01da\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01e0\3\2\2\2\u01de"+ - "\u01dc\3\2\2\2\u01df\u01d7\3\2\2\2\u01df\u01d8\3\2\2\2\u01e0\u01e7\3\2"+ - "\2\2\u01e1\u01e3\5\24\n\2\u01e2\u01e4\t\t\2\2\u01e3\u01e2\3\2\2\2\u01e4"+ - "\u01e5\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6\u01e8\3\2"+ - "\2\2\u01e7\u01e1\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u01f2\3\2\2\2\u01e9"+ - "\u01eb\t\13\2\2\u01ea\u01ec\t\f\2\2\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3"+ - "\2\2\2\u01ec\u01ee\3\2\2\2\u01ed\u01ef\t\t\2\2\u01ee\u01ed\3\2\2\2\u01ef"+ - "\u01f0\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f3\3\2"+ - "\2\2\u01f2\u01e9\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f5\3\2\2\2\u01f4"+ - "\u01f6\t\r\2\2\u01f5\u01f4\3\2\2\2\u01f5\u01f6\3\2\2\2\u01f6\u0099\3\2"+ - "\2\2\u01f7\u01ff\7$\2\2\u01f8\u01f9\7^\2\2\u01f9\u01fe\7$\2\2\u01fa\u01fb"+ - "\7^\2\2\u01fb\u01fe\7^\2\2\u01fc\u01fe\n\16\2\2\u01fd\u01f8\3\2\2\2\u01fd"+ - "\u01fa\3\2\2\2\u01fd\u01fc\3\2\2\2\u01fe\u0201\3\2\2\2\u01ff\u0200\3\2"+ - "\2\2\u01ff\u01fd\3\2\2\2\u0200\u0202\3\2\2\2\u0201\u01ff\3\2\2\2\u0202"+ - "\u0210\7$\2\2\u0203\u020b\7)\2\2\u0204\u0205\7^\2\2\u0205\u020a\7)\2\2"+ - "\u0206\u0207\7^\2\2\u0207\u020a\7^\2\2\u0208\u020a\n\17\2\2\u0209\u0204"+ - "\3\2\2\2\u0209\u0206\3\2\2\2\u0209\u0208\3\2\2\2\u020a\u020d\3\2\2\2\u020b"+ - "\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020e\3\2\2\2\u020d\u020b\3\2"+ - "\2\2\u020e\u0210\7)\2\2\u020f\u01f7\3\2\2\2\u020f\u0203\3\2\2\2\u0210"+ - "\u009b\3\2\2\2\u0211\u0215\7\61\2\2\u0212\u0213\7^\2\2\u0213\u0216\n\20"+ - "\2\2\u0214\u0216\n\21\2\2\u0215\u0212\3\2\2\2\u0215\u0214\3\2\2\2\u0216"+ - "\u0217\3\2\2\2\u0217\u0218\3\2\2\2\u0217\u0215\3\2\2\2\u0218\u0219\3\2"+ - "\2\2\u0219\u021d\7\61\2\2\u021a\u021c\t\22\2\2\u021b\u021a\3\2\2\2\u021c"+ - "\u021f\3\2\2\2\u021d\u021b\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u0220\3\2"+ - "\2\2\u021f\u021d\3\2\2\2\u0220\u0221\6N\3\2\u0221\u009d\3\2\2\2\u0222"+ - "\u0223\7v\2\2\u0223\u0224\7t\2\2\u0224\u0225\7w\2\2\u0225\u0226\7g\2\2"+ - "\u0226\u009f\3\2\2\2\u0227\u0228\7h\2\2\u0228\u0229\7c\2\2\u0229\u022a"+ - "\7n\2\2\u022a\u022b\7u\2\2\u022b\u022c\7g\2\2\u022c\u00a1\3\2\2\2\u022d"+ - 
"\u022e\7p\2\2\u022e\u022f\7w\2\2\u022f\u0230\7n\2\2\u0230\u0231\7n\2\2"+ - "\u0231\u00a3\3\2\2\2\u0232\u0233\7d\2\2\u0233\u0234\7q\2\2\u0234\u0235"+ - "\7q\2\2\u0235\u0236\7n\2\2\u0236\u0237\7g\2\2\u0237\u0238\7c\2\2\u0238"+ - "\u0259\7p\2\2\u0239\u023a\7d\2\2\u023a\u023b\7{\2\2\u023b\u023c\7v\2\2"+ - "\u023c\u0259\7g\2\2\u023d\u023e\7u\2\2\u023e\u023f\7j\2\2\u023f\u0240"+ - "\7q\2\2\u0240\u0241\7t\2\2\u0241\u0259\7v\2\2\u0242\u0243\7e\2\2\u0243"+ - "\u0244\7j\2\2\u0244\u0245\7c\2\2\u0245\u0259\7t\2\2\u0246\u0247\7k\2\2"+ - "\u0247\u0248\7p\2\2\u0248\u0259\7v\2\2\u0249\u024a\7n\2\2\u024a\u024b"+ - "\7q\2\2\u024b\u024c\7p\2\2\u024c\u0259\7i\2\2\u024d\u024e\7h\2\2\u024e"+ - "\u024f\7n\2\2\u024f\u0250\7q\2\2\u0250\u0251\7c\2\2\u0251\u0259\7v\2\2"+ - "\u0252\u0253\7f\2\2\u0253\u0254\7q\2\2\u0254\u0255\7w\2\2\u0255\u0256"+ - "\7d\2\2\u0256\u0257\7n\2\2\u0257\u0259\7g\2\2\u0258\u0232\3\2\2\2\u0258"+ - "\u0239\3\2\2\2\u0258\u023d\3\2\2\2\u0258\u0242\3\2\2\2\u0258\u0246\3\2"+ - "\2\2\u0258\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0252\3\2\2\2\u0259"+ - "\u00a5\3\2\2\2\u025a\u025b\7f\2\2\u025b\u025c\7g\2\2\u025c\u025d\7h\2"+ - "\2\u025d\u00a7\3\2\2\2\u025e\u0262\t\23\2\2\u025f\u0261\t\24\2\2\u0260"+ - "\u025f\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2\u0262\u0263\3\2"+ - "\2\2\u0263\u00a9\3\2\2\2\u0264\u0262\3\2\2\2\u0265\u026e\7\62\2\2\u0266"+ - "\u026a\t\b\2\2\u0267\u0269\t\t\2\2\u0268\u0267\3\2\2\2\u0269\u026c\3\2"+ - "\2\2\u026a\u0268\3\2\2\2\u026a\u026b\3\2\2\2\u026b\u026e\3\2\2\2\u026c"+ - "\u026a\3\2\2\2\u026d\u0265\3\2\2\2\u026d\u0266\3\2\2\2\u026e\u026f\3\2"+ - "\2\2\u026f\u0270\bU\4\2\u0270\u00ab\3\2\2\2\u0271\u0275\t\23\2\2\u0272"+ - "\u0274\t\24\2\2\u0273\u0272\3\2\2\2\u0274\u0277\3\2\2\2\u0275\u0273\3"+ - "\2\2\2\u0275\u0276\3\2\2\2\u0276\u0278\3\2\2\2\u0277\u0275\3\2\2\2\u0278"+ - "\u0279\bV\4\2\u0279\u00ad\3\2\2\2$\2\3\u00b1\u00bb\u00c5\u00ca\u01bb\u01be"+ - "\u01c5\u01c8\u01cf\u01d2\u01d5\u01dc\u01df\u01e5\u01e7\u01eb\u01f0\u01f2"+ - "\u01f5\u01fd\u01ff\u0209\u020b\u020f\u0215\u0217\u021d\u0258\u0262\u026a"+ - "\u026d\u0275\5\b\2\2\4\3\2\4\2\2"; - public static final ATN _ATN = - new ATNDeserializer().deserialize(_serializedATN.toCharArray()); - static { - _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; - for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { - _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + @Override + @Deprecated + public String[] getTokenNames() { + return tokenNames; + } + + @Override + + public Vocabulary getVocabulary() { + return VOCABULARY; + } + + /** Is the preceding {@code /} a the beginning of a regex (true) or a division (false). 
*/ + protected abstract boolean isSlashRegex(); + + public PainlessLexer(CharStream input) { + super(input); + _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache); + } + + @Override + public String getGrammarFileName() { + return "PainlessLexer.g4"; + } + + @Override + public String[] getRuleNames() { + return ruleNames; + } + + @Override + public String getSerializedATN() { + return _serializedATN; + } + + @Override + public String[] getModeNames() { + return modeNames; + } + + @Override + public ATN getATN() { + return _ATN; + } + + @Override + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 30: + return DIV_sempred((RuleContext) _localctx, predIndex); + case 76: + return REGEX_sempred((RuleContext) _localctx, predIndex); + } + return true; + } + + private boolean DIV_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return isSlashRegex() == false; + } + return true; + } + + private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 1: + return isSlashRegex(); + } + return true; + } + + public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2W\u027a\b\1\b\1\4" + + "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n" + + "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22" + + "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31" + + "\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t" + + " \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t" + + "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64" + + "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t" + + "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4" + + "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\t" + + "T\4U\tU\4V\tV\3\2\6\2\u00b0\n\2\r\2\16\2\u00b1\3\2\3\2\3\3\3\3\3\3\3\3" + + "\7\3\u00ba\n\3\f\3\16\3\u00bd\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00c4\n\3\f" + + "\3\16\3\u00c7\13\3\3\3\3\3\5\3\u00cb\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3" + + "\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3" + + "\f\3\f\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20" + + "\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\24" + + "\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25" + + "\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30" + + "\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33" + + "\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34" + + "\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3\"\3\"\3#\3#\3$\3" + + "$\3$\3%\3%\3%\3&\3&\3&\3&\3\'\3\'\3(\3(\3(\3)\3)\3*\3*\3*\3+\3+\3+\3," + + "\3,\3,\3,\3-\3-\3-\3.\3.\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62" + + "\3\63\3\63\3\63\3\64\3\64\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\3" + + "8\38\39\39\39\3:\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3>\3>\3>\3?\3?\3?\3" + + "@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3C\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3G\3" + + "G\3G\3G\3H\3H\3H\3H\3H\3I\3I\6I\u01ba\nI\rI\16I\u01bb\3I\5I\u01bf\nI\3" + + "J\3J\3J\6J\u01c4\nJ\rJ\16J\u01c5\3J\5J\u01c9\nJ\3K\3K\3K\7K\u01ce\nK\f" + + "K\16K\u01d1\13K\5K\u01d3\nK\3K\5K\u01d6\nK\3L\3L\3L\7L\u01db\nL\fL\16" + + "L\u01de\13L\5L\u01e0\nL\3L\3L\6L\u01e4\nL\rL\16L\u01e5\5L\u01e8\nL\3L" + + 
"\3L\5L\u01ec\nL\3L\6L\u01ef\nL\rL\16L\u01f0\5L\u01f3\nL\3L\5L\u01f6\n" + + "L\3M\3M\3M\3M\3M\3M\7M\u01fe\nM\fM\16M\u0201\13M\3M\3M\3M\3M\3M\3M\3M" + + "\7M\u020a\nM\fM\16M\u020d\13M\3M\5M\u0210\nM\3N\3N\3N\3N\6N\u0216\nN\r" + + "N\16N\u0217\3N\3N\7N\u021c\nN\fN\16N\u021f\13N\3N\3N\3O\3O\3O\3O\3O\3" + + "P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3" + + "R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3R\3" + + "R\3R\3R\5R\u0259\nR\3S\3S\3S\3S\3T\3T\7T\u0261\nT\fT\16T\u0264\13T\3U" + + "\3U\3U\7U\u0269\nU\fU\16U\u026c\13U\5U\u026e\nU\3U\3U\3V\3V\7V\u0274\n" + + "V\fV\16V\u0277\13V\3V\3V\7\u00bb\u00c5\u01ff\u020b\u0217\2W\4\3\6\4\b" + + "\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21\"\22$" + + "\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!B\"D#F" + + "$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I" + + "\u0092J\u0094K\u0096L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\u00a4" + + "S\u00a6T\u00a8U\u00aaV\u00acW\4\2\3\25\5\2\13\f\17\17\"\"\4\2\f\f\17\17" + + "\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4" + + "\2GGgg\4\2--//\6\2FFHHffhh\4\2$$^^\4\2))^^\3\2\f\f\4\2\f\f\61\61\t\2W" + + "Weekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u02a0\2\4\3\2\2\2\2\6\3\2\2\2" + + "\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3" + + "\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2" + + "\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2" + + "\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3\2" + + "\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2" + + "\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2" + + "N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3" + + "\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2" + + "\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2" + + "\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080" + + "\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2" + + "\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092" + + "\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098\3\2\2\2\2\u009a\3\2\2" + + "\2\2\u009c\3\2\2\2\2\u009e\3\2\2\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2\2\2\u00a4" + + "\3\2\2\2\2\u00a6\3\2\2\2\2\u00a8\3\2\2\2\3\u00aa\3\2\2\2\3\u00ac\3\2\2" + + "\2\4\u00af\3\2\2\2\6\u00ca\3\2\2\2\b\u00ce\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2" + + "\3\2\2\2\16\u00d4\3\2\2\2\20\u00d6\3\2\2\2\22\u00d8\3\2\2\2\24\u00da\3" + + "\2\2\2\26\u00de\3\2\2\2\30\u00e3\3\2\2\2\32\u00e5\3\2\2\2\34\u00e7\3\2" + + "\2\2\36\u00ea\3\2\2\2 \u00ed\3\2\2\2\"\u00f2\3\2\2\2$\u00f8\3\2\2\2&\u00fb" + + "\3\2\2\2(\u00ff\3\2\2\2*\u0108\3\2\2\2,\u010e\3\2\2\2.\u0115\3\2\2\2\60" + + "\u0119\3\2\2\2\62\u011d\3\2\2\2\64\u0123\3\2\2\2\66\u0129\3\2\2\28\u012e" + + "\3\2\2\2:\u0139\3\2\2\2<\u013b\3\2\2\2>\u013d\3\2\2\2@\u013f\3\2\2\2B" + + "\u0142\3\2\2\2D\u0144\3\2\2\2F\u0146\3\2\2\2H\u0148\3\2\2\2J\u014b\3\2" + + "\2\2L\u014e\3\2\2\2N\u0152\3\2\2\2P\u0154\3\2\2\2R\u0157\3\2\2\2T\u0159" + + "\3\2\2\2V\u015c\3\2\2\2X\u015f\3\2\2\2Z\u0163\3\2\2\2\\\u0166\3\2\2\2" + + "^\u016a\3\2\2\2`\u016c\3\2\2\2b\u016e\3\2\2\2d\u0170\3\2\2\2f\u0173\3" + + "\2\2\2h\u0176\3\2\2\2j\u0178\3\2\2\2l\u017a\3\2\2\2n\u017d\3\2\2\2p\u0180" + + "\3\2\2\2r\u0183\3\2\2\2t\u0186\3\2\2\2v\u018a\3\2\2\2x\u018d\3\2\2\2z" + + 
"\u0190\3\2\2\2|\u0192\3\2\2\2~\u0195\3\2\2\2\u0080\u0198\3\2\2\2\u0082" + + "\u019b\3\2\2\2\u0084\u019e\3\2\2\2\u0086\u01a1\3\2\2\2\u0088\u01a4\3\2" + + "\2\2\u008a\u01a7\3\2\2\2\u008c\u01aa\3\2\2\2\u008e\u01ae\3\2\2\2\u0090" + + "\u01b2\3\2\2\2\u0092\u01b7\3\2\2\2\u0094\u01c0\3\2\2\2\u0096\u01d2\3\2" + + "\2\2\u0098\u01df\3\2\2\2\u009a\u020f\3\2\2\2\u009c\u0211\3\2\2\2\u009e" + + "\u0222\3\2\2\2\u00a0\u0227\3\2\2\2\u00a2\u022d\3\2\2\2\u00a4\u0258\3\2" + + "\2\2\u00a6\u025a\3\2\2\2\u00a8\u025e\3\2\2\2\u00aa\u026d\3\2\2\2\u00ac" + + "\u0271\3\2\2\2\u00ae\u00b0\t\2\2\2\u00af\u00ae\3\2\2\2\u00b0\u00b1\3\2" + + "\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3" + + "\u00b4\b\2\2\2\u00b4\5\3\2\2\2\u00b5\u00b6\7\61\2\2\u00b6\u00b7\7\61\2" + + "\2\u00b7\u00bb\3\2\2\2\u00b8\u00ba\13\2\2\2\u00b9\u00b8\3\2\2\2\u00ba" + + "\u00bd\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bb\u00b9\3\2\2\2\u00bc\u00be\3\2" + + "\2\2\u00bd\u00bb\3\2\2\2\u00be\u00cb\t\3\2\2\u00bf\u00c0\7\61\2\2\u00c0" + + "\u00c1\7,\2\2\u00c1\u00c5\3\2\2\2\u00c2\u00c4\13\2\2\2\u00c3\u00c2\3\2" + + "\2\2\u00c4\u00c7\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c6" + + "\u00c8\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7,\2\2\u00c9\u00cb\7\61" + + "\2\2\u00ca\u00b5\3\2\2\2\u00ca\u00bf\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc" + + "\u00cd\b\3\2\2\u00cd\7\3\2\2\2\u00ce\u00cf\7}\2\2\u00cf\t\3\2\2\2\u00d0" + + "\u00d1\7\177\2\2\u00d1\13\3\2\2\2\u00d2\u00d3\7]\2\2\u00d3\r\3\2\2\2\u00d4" + + "\u00d5\7_\2\2\u00d5\17\3\2\2\2\u00d6\u00d7\7*\2\2\u00d7\21\3\2\2\2\u00d8" + + "\u00d9\7+\2\2\u00d9\23\3\2\2\2\u00da\u00db\7\60\2\2\u00db\u00dc\3\2\2" + + "\2\u00dc\u00dd\b\n\3\2\u00dd\25\3\2\2\2\u00de\u00df\7A\2\2\u00df\u00e0" + + "\7\60\2\2\u00e0\u00e1\3\2\2\2\u00e1\u00e2\b\13\3\2\u00e2\27\3\2\2\2\u00e3" + + "\u00e4\7.\2\2\u00e4\31\3\2\2\2\u00e5\u00e6\7=\2\2\u00e6\33\3\2\2\2\u00e7" + + "\u00e8\7k\2\2\u00e8\u00e9\7h\2\2\u00e9\35\3\2\2\2\u00ea\u00eb\7k\2\2\u00eb" + + "\u00ec\7p\2\2\u00ec\37\3\2\2\2\u00ed\u00ee\7g\2\2\u00ee\u00ef\7n\2\2\u00ef" + + "\u00f0\7u\2\2\u00f0\u00f1\7g\2\2\u00f1!\3\2\2\2\u00f2\u00f3\7y\2\2\u00f3" + + "\u00f4\7j\2\2\u00f4\u00f5\7k\2\2\u00f5\u00f6\7n\2\2\u00f6\u00f7\7g\2\2" + + "\u00f7#\3\2\2\2\u00f8\u00f9\7f\2\2\u00f9\u00fa\7q\2\2\u00fa%\3\2\2\2\u00fb" + + "\u00fc\7h\2\2\u00fc\u00fd\7q\2\2\u00fd\u00fe\7t\2\2\u00fe\'\3\2\2\2\u00ff" + + "\u0100\7e\2\2\u0100\u0101\7q\2\2\u0101\u0102\7p\2\2\u0102\u0103\7v\2\2" + + "\u0103\u0104\7k\2\2\u0104\u0105\7p\2\2\u0105\u0106\7w\2\2\u0106\u0107" + + "\7g\2\2\u0107)\3\2\2\2\u0108\u0109\7d\2\2\u0109\u010a\7t\2\2\u010a\u010b" + + "\7g\2\2\u010b\u010c\7c\2\2\u010c\u010d\7m\2\2\u010d+\3\2\2\2\u010e\u010f" + + "\7t\2\2\u010f\u0110\7g\2\2\u0110\u0111\7v\2\2\u0111\u0112\7w\2\2\u0112" + + "\u0113\7t\2\2\u0113\u0114\7p\2\2\u0114-\3\2\2\2\u0115\u0116\7p\2\2\u0116" + + "\u0117\7g\2\2\u0117\u0118\7y\2\2\u0118/\3\2\2\2\u0119\u011a\7v\2\2\u011a" + + "\u011b\7t\2\2\u011b\u011c\7{\2\2\u011c\61\3\2\2\2\u011d\u011e\7e\2\2\u011e" + + "\u011f\7c\2\2\u011f\u0120\7v\2\2\u0120\u0121\7e\2\2\u0121\u0122\7j\2\2" + + "\u0122\63\3\2\2\2\u0123\u0124\7v\2\2\u0124\u0125\7j\2\2\u0125\u0126\7" + + "t\2\2\u0126\u0127\7q\2\2\u0127\u0128\7y\2\2\u0128\65\3\2\2\2\u0129\u012a" + + "\7v\2\2\u012a\u012b\7j\2\2\u012b\u012c\7k\2\2\u012c\u012d\7u\2\2\u012d" + + "\67\3\2\2\2\u012e\u012f\7k\2\2\u012f\u0130\7p\2\2\u0130\u0131\7u\2\2\u0131" + + "\u0132\7v\2\2\u0132\u0133\7c\2\2\u0133\u0134\7p\2\2\u0134\u0135\7e\2\2" + + "\u0135\u0136\7g\2\2\u0136\u0137\7q\2\2\u0137\u0138\7h\2\2\u01389\3\2\2" + + 
"\2\u0139\u013a\7#\2\2\u013a;\3\2\2\2\u013b\u013c\7\u0080\2\2\u013c=\3" + + "\2\2\2\u013d\u013e\7,\2\2\u013e?\3\2\2\2\u013f\u0140\7\61\2\2\u0140\u0141" + + "\6 \2\2\u0141A\3\2\2\2\u0142\u0143\7\'\2\2\u0143C\3\2\2\2\u0144\u0145" + + "\7-\2\2\u0145E\3\2\2\2\u0146\u0147\7/\2\2\u0147G\3\2\2\2\u0148\u0149\7" + + ">\2\2\u0149\u014a\7>\2\2\u014aI\3\2\2\2\u014b\u014c\7@\2\2\u014c\u014d" + + "\7@\2\2\u014dK\3\2\2\2\u014e\u014f\7@\2\2\u014f\u0150\7@\2\2\u0150\u0151" + + "\7@\2\2\u0151M\3\2\2\2\u0152\u0153\7>\2\2\u0153O\3\2\2\2\u0154\u0155\7" + + ">\2\2\u0155\u0156\7?\2\2\u0156Q\3\2\2\2\u0157\u0158\7@\2\2\u0158S\3\2" + + "\2\2\u0159\u015a\7@\2\2\u015a\u015b\7?\2\2\u015bU\3\2\2\2\u015c\u015d" + + "\7?\2\2\u015d\u015e\7?\2\2\u015eW\3\2\2\2\u015f\u0160\7?\2\2\u0160\u0161" + + "\7?\2\2\u0161\u0162\7?\2\2\u0162Y\3\2\2\2\u0163\u0164\7#\2\2\u0164\u0165" + + "\7?\2\2\u0165[\3\2\2\2\u0166\u0167\7#\2\2\u0167\u0168\7?\2\2\u0168\u0169" + + "\7?\2\2\u0169]\3\2\2\2\u016a\u016b\7(\2\2\u016b_\3\2\2\2\u016c\u016d\7" + + "`\2\2\u016da\3\2\2\2\u016e\u016f\7~\2\2\u016fc\3\2\2\2\u0170\u0171\7(" + + "\2\2\u0171\u0172\7(\2\2\u0172e\3\2\2\2\u0173\u0174\7~\2\2\u0174\u0175" + + "\7~\2\2\u0175g\3\2\2\2\u0176\u0177\7A\2\2\u0177i\3\2\2\2\u0178\u0179\7" + + "<\2\2\u0179k\3\2\2\2\u017a\u017b\7A\2\2\u017b\u017c\7<\2\2\u017cm\3\2" + + "\2\2\u017d\u017e\7<\2\2\u017e\u017f\7<\2\2\u017fo\3\2\2\2\u0180\u0181" + + "\7/\2\2\u0181\u0182\7@\2\2\u0182q\3\2\2\2\u0183\u0184\7?\2\2\u0184\u0185" + + "\7\u0080\2\2\u0185s\3\2\2\2\u0186\u0187\7?\2\2\u0187\u0188\7?\2\2\u0188" + + "\u0189\7\u0080\2\2\u0189u\3\2\2\2\u018a\u018b\7-\2\2\u018b\u018c\7-\2" + + "\2\u018cw\3\2\2\2\u018d\u018e\7/\2\2\u018e\u018f\7/\2\2\u018fy\3\2\2\2" + + "\u0190\u0191\7?\2\2\u0191{\3\2\2\2\u0192\u0193\7-\2\2\u0193\u0194\7?\2" + + "\2\u0194}\3\2\2\2\u0195\u0196\7/\2\2\u0196\u0197\7?\2\2\u0197\177\3\2" + + "\2\2\u0198\u0199\7,\2\2\u0199\u019a\7?\2\2\u019a\u0081\3\2\2\2\u019b\u019c" + + "\7\61\2\2\u019c\u019d\7?\2\2\u019d\u0083\3\2\2\2\u019e\u019f\7\'\2\2\u019f" + + "\u01a0\7?\2\2\u01a0\u0085\3\2\2\2\u01a1\u01a2\7(\2\2\u01a2\u01a3\7?\2" + + "\2\u01a3\u0087\3\2\2\2\u01a4\u01a5\7`\2\2\u01a5\u01a6\7?\2\2\u01a6\u0089" + + "\3\2\2\2\u01a7\u01a8\7~\2\2\u01a8\u01a9\7?\2\2\u01a9\u008b\3\2\2\2\u01aa" + + "\u01ab\7>\2\2\u01ab\u01ac\7>\2\2\u01ac\u01ad\7?\2\2\u01ad\u008d\3\2\2" + + "\2\u01ae\u01af\7@\2\2\u01af\u01b0\7@\2\2\u01b0\u01b1\7?\2\2\u01b1\u008f" + + "\3\2\2\2\u01b2\u01b3\7@\2\2\u01b3\u01b4\7@\2\2\u01b4\u01b5\7@\2\2\u01b5" + + "\u01b6\7?\2\2\u01b6\u0091\3\2\2\2\u01b7\u01b9\7\62\2\2\u01b8\u01ba\t\4" + + "\2\2\u01b9\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bb" + + "\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd\u01bf\t\5\2\2\u01be\u01bd\3\2" + + "\2\2\u01be\u01bf\3\2\2\2\u01bf\u0093\3\2\2\2\u01c0\u01c1\7\62\2\2\u01c1" + + "\u01c3\t\6\2\2\u01c2\u01c4\t\7\2\2\u01c3\u01c2\3\2\2\2\u01c4\u01c5\3\2" + + "\2\2\u01c5\u01c3\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c8\3\2\2\2\u01c7" + + "\u01c9\t\5\2\2\u01c8\u01c7\3\2\2\2\u01c8\u01c9\3\2\2\2\u01c9\u0095\3\2" + + "\2\2\u01ca\u01d3\7\62\2\2\u01cb\u01cf\t\b\2\2\u01cc\u01ce\t\t\2\2\u01cd" + + "\u01cc\3\2\2\2\u01ce\u01d1\3\2\2\2\u01cf\u01cd\3\2\2\2\u01cf\u01d0\3\2" + + "\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d2\u01ca\3\2\2\2\u01d2" + + "\u01cb\3\2\2\2\u01d3\u01d5\3\2\2\2\u01d4\u01d6\t\n\2\2\u01d5\u01d4\3\2" + + "\2\2\u01d5\u01d6\3\2\2\2\u01d6\u0097\3\2\2\2\u01d7\u01e0\7\62\2\2\u01d8" + + "\u01dc\t\b\2\2\u01d9\u01db\t\t\2\2\u01da\u01d9\3\2\2\2\u01db\u01de\3\2" + + 
"\2\2\u01dc\u01da\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01e0\3\2\2\2\u01de" + + "\u01dc\3\2\2\2\u01df\u01d7\3\2\2\2\u01df\u01d8\3\2\2\2\u01e0\u01e7\3\2" + + "\2\2\u01e1\u01e3\5\24\n\2\u01e2\u01e4\t\t\2\2\u01e3\u01e2\3\2\2\2\u01e4" + + "\u01e5\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6\u01e8\3\2" + + "\2\2\u01e7\u01e1\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u01f2\3\2\2\2\u01e9" + + "\u01eb\t\13\2\2\u01ea\u01ec\t\f\2\2\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3" + + "\2\2\2\u01ec\u01ee\3\2\2\2\u01ed\u01ef\t\t\2\2\u01ee\u01ed\3\2\2\2\u01ef" + + "\u01f0\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f3\3\2" + + "\2\2\u01f2\u01e9\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f5\3\2\2\2\u01f4" + + "\u01f6\t\r\2\2\u01f5\u01f4\3\2\2\2\u01f5\u01f6\3\2\2\2\u01f6\u0099\3\2" + + "\2\2\u01f7\u01ff\7$\2\2\u01f8\u01f9\7^\2\2\u01f9\u01fe\7$\2\2\u01fa\u01fb" + + "\7^\2\2\u01fb\u01fe\7^\2\2\u01fc\u01fe\n\16\2\2\u01fd\u01f8\3\2\2\2\u01fd" + + "\u01fa\3\2\2\2\u01fd\u01fc\3\2\2\2\u01fe\u0201\3\2\2\2\u01ff\u0200\3\2" + + "\2\2\u01ff\u01fd\3\2\2\2\u0200\u0202\3\2\2\2\u0201\u01ff\3\2\2\2\u0202" + + "\u0210\7$\2\2\u0203\u020b\7)\2\2\u0204\u0205\7^\2\2\u0205\u020a\7)\2\2" + + "\u0206\u0207\7^\2\2\u0207\u020a\7^\2\2\u0208\u020a\n\17\2\2\u0209\u0204" + + "\3\2\2\2\u0209\u0206\3\2\2\2\u0209\u0208\3\2\2\2\u020a\u020d\3\2\2\2\u020b" + + "\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020e\3\2\2\2\u020d\u020b\3\2" + + "\2\2\u020e\u0210\7)\2\2\u020f\u01f7\3\2\2\2\u020f\u0203\3\2\2\2\u0210" + + "\u009b\3\2\2\2\u0211\u0215\7\61\2\2\u0212\u0213\7^\2\2\u0213\u0216\n\20" + + "\2\2\u0214\u0216\n\21\2\2\u0215\u0212\3\2\2\2\u0215\u0214\3\2\2\2\u0216" + + "\u0217\3\2\2\2\u0217\u0218\3\2\2\2\u0217\u0215\3\2\2\2\u0218\u0219\3\2" + + "\2\2\u0219\u021d\7\61\2\2\u021a\u021c\t\22\2\2\u021b\u021a\3\2\2\2\u021c" + + "\u021f\3\2\2\2\u021d\u021b\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u0220\3\2" + + "\2\2\u021f\u021d\3\2\2\2\u0220\u0221\6N\3\2\u0221\u009d\3\2\2\2\u0222" + + "\u0223\7v\2\2\u0223\u0224\7t\2\2\u0224\u0225\7w\2\2\u0225\u0226\7g\2\2" + + "\u0226\u009f\3\2\2\2\u0227\u0228\7h\2\2\u0228\u0229\7c\2\2\u0229\u022a" + + "\7n\2\2\u022a\u022b\7u\2\2\u022b\u022c\7g\2\2\u022c\u00a1\3\2\2\2\u022d" + + "\u022e\7p\2\2\u022e\u022f\7w\2\2\u022f\u0230\7n\2\2\u0230\u0231\7n\2\2" + + "\u0231\u00a3\3\2\2\2\u0232\u0233\7d\2\2\u0233\u0234\7q\2\2\u0234\u0235" + + "\7q\2\2\u0235\u0236\7n\2\2\u0236\u0237\7g\2\2\u0237\u0238\7c\2\2\u0238" + + "\u0259\7p\2\2\u0239\u023a\7d\2\2\u023a\u023b\7{\2\2\u023b\u023c\7v\2\2" + + "\u023c\u0259\7g\2\2\u023d\u023e\7u\2\2\u023e\u023f\7j\2\2\u023f\u0240" + + "\7q\2\2\u0240\u0241\7t\2\2\u0241\u0259\7v\2\2\u0242\u0243\7e\2\2\u0243" + + "\u0244\7j\2\2\u0244\u0245\7c\2\2\u0245\u0259\7t\2\2\u0246\u0247\7k\2\2" + + "\u0247\u0248\7p\2\2\u0248\u0259\7v\2\2\u0249\u024a\7n\2\2\u024a\u024b" + + "\7q\2\2\u024b\u024c\7p\2\2\u024c\u0259\7i\2\2\u024d\u024e\7h\2\2\u024e" + + "\u024f\7n\2\2\u024f\u0250\7q\2\2\u0250\u0251\7c\2\2\u0251\u0259\7v\2\2" + + "\u0252\u0253\7f\2\2\u0253\u0254\7q\2\2\u0254\u0255\7w\2\2\u0255\u0256" + + "\7d\2\2\u0256\u0257\7n\2\2\u0257\u0259\7g\2\2\u0258\u0232\3\2\2\2\u0258" + + "\u0239\3\2\2\2\u0258\u023d\3\2\2\2\u0258\u0242\3\2\2\2\u0258\u0246\3\2" + + "\2\2\u0258\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0252\3\2\2\2\u0259" + + "\u00a5\3\2\2\2\u025a\u025b\7f\2\2\u025b\u025c\7g\2\2\u025c\u025d\7h\2" + + "\2\u025d\u00a7\3\2\2\2\u025e\u0262\t\23\2\2\u025f\u0261\t\24\2\2\u0260" + + "\u025f\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2\u0262\u0263\3\2" + + "\2\2\u0263\u00a9\3\2\2\2\u0264\u0262\3\2\2\2\u0265\u026e\7\62\2\2\u0266" + 
+ "\u026a\t\b\2\2\u0267\u0269\t\t\2\2\u0268\u0267\3\2\2\2\u0269\u026c\3\2" + + "\2\2\u026a\u0268\3\2\2\2\u026a\u026b\3\2\2\2\u026b\u026e\3\2\2\2\u026c" + + "\u026a\3\2\2\2\u026d\u0265\3\2\2\2\u026d\u0266\3\2\2\2\u026e\u026f\3\2" + + "\2\2\u026f\u0270\bU\4\2\u0270\u00ab\3\2\2\2\u0271\u0275\t\23\2\2\u0272" + + "\u0274\t\24\2\2\u0273\u0272\3\2\2\2\u0274\u0277\3\2\2\2\u0275\u0273\3" + + "\2\2\2\u0275\u0276\3\2\2\2\u0276\u0278\3\2\2\2\u0277\u0275\3\2\2\2\u0278" + + "\u0279\bV\4\2\u0279\u00ad\3\2\2\2$\2\3\u00b1\u00bb\u00c5\u00ca\u01bb\u01be" + + "\u01c5\u01c8\u01cf\u01d2\u01d5\u01dc\u01df\u01e5\u01e7\u01eb\u01f0\u01f2" + + "\u01f5\u01fd\u01ff\u0209\u020b\u020f\u0215\u0217\u021d\u0258\u0262\u026a" + + "\u026d\u0275\5\b\2\2\4\3\2\4\2\2"; + public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + } } - } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java index 67b27ba4d7f..1e064724c24 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java @@ -32,4390 +32,5510 @@ */ package org.opensearch.painless.antlr; + import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.misc.*; import org.antlr.v4.runtime.tree.*; import java.util.List; -import java.util.Iterator; -import java.util.ArrayList; -@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) +@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast" }) class PainlessParser extends Parser { - static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } - - protected static final DFA[] _decisionToDFA; - protected static final PredictionContextCache _sharedContextCache = - new PredictionContextCache(); - public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, - FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, - THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, - ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, - EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, - COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, - DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, - AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, - DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, PRIMITIVE=81, - DEF=82, ID=83, DOTINTEGER=84, DOTID=85; - public static final int - RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, - RULE_rstatement = 4, RULE_dstatement = 5, RULE_trailer = 6, RULE_block = 7, - RULE_empty = 8, RULE_initializer = 9, RULE_afterthought = 10, RULE_declaration = 11, - RULE_decltype = 12, RULE_type = 13, RULE_declvar = 14, RULE_trap = 15, - RULE_noncondexpression = 16, RULE_expression = 17, RULE_unary = 18, RULE_unarynotaddsub = 19, - RULE_castexpression = 20, RULE_primordefcasttype = 21, RULE_refcasttype = 22, - RULE_chain = 23, 
RULE_primary = 24, RULE_postfix = 25, RULE_postdot = 26, - RULE_callinvoke = 27, RULE_fieldaccess = 28, RULE_braceaccess = 29, RULE_arrayinitializer = 30, - RULE_listinitializer = 31, RULE_mapinitializer = 32, RULE_maptoken = 33, - RULE_arguments = 34, RULE_argument = 35, RULE_lambda = 36, RULE_lamtype = 37, - RULE_funcref = 38; - public static final String[] ruleNames = { - "source", "function", "parameters", "statement", "rstatement", "dstatement", - "trailer", "block", "empty", "initializer", "afterthought", "declaration", - "decltype", "type", "declvar", "trap", "noncondexpression", "expression", - "unary", "unarynotaddsub", "castexpression", "primordefcasttype", "refcasttype", - "chain", "primary", "postfix", "postdot", "callinvoke", "fieldaccess", - "braceaccess", "arrayinitializer", "listinitializer", "mapinitializer", - "maptoken", "arguments", "argument", "lambda", "lamtype", "funcref" - }; - - private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", - "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", - "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", - "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", - "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", - "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", - "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, - null, null, null, null, null, "'true'", "'false'", "'null'", null, "'def'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", - "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", - "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", - "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", - "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", - "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", - "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", - "NULL", "PRIMITIVE", "DEF", "ID", "DOTINTEGER", "DOTID" - }; - public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); - - /** - * @deprecated Use {@link #VOCABULARY} instead. 
- */ - @Deprecated - public static final String[] tokenNames; - static { - tokenNames = new String[_SYMBOLIC_NAMES.length]; - for (int i = 0; i < tokenNames.length; i++) { - tokenNames[i] = VOCABULARY.getLiteralName(i); - if (tokenNames[i] == null) { - tokenNames[i] = VOCABULARY.getSymbolicName(i); - } - - if (tokenNames[i] == null) { - tokenNames[i] = ""; - } + static { + RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); } - } - @Override - @Deprecated - public String[] getTokenNames() { - return tokenNames; - } + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); + public static final int WS = 1, COMMENT = 2, LBRACK = 3, RBRACK = 4, LBRACE = 5, RBRACE = 6, LP = 7, RP = 8, DOT = 9, NSDOT = 10, + COMMA = 11, SEMICOLON = 12, IF = 13, IN = 14, ELSE = 15, WHILE = 16, DO = 17, FOR = 18, CONTINUE = 19, BREAK = 20, RETURN = 21, + NEW = 22, TRY = 23, CATCH = 24, THROW = 25, THIS = 26, INSTANCEOF = 27, BOOLNOT = 28, BWNOT = 29, MUL = 30, DIV = 31, REM = 32, + ADD = 33, SUB = 34, LSH = 35, RSH = 36, USH = 37, LT = 38, LTE = 39, GT = 40, GTE = 41, EQ = 42, EQR = 43, NE = 44, NER = 45, + BWAND = 46, XOR = 47, BWOR = 48, BOOLAND = 49, BOOLOR = 50, COND = 51, COLON = 52, ELVIS = 53, REF = 54, ARROW = 55, FIND = 56, + MATCH = 57, INCR = 58, DECR = 59, ASSIGN = 60, AADD = 61, ASUB = 62, AMUL = 63, ADIV = 64, AREM = 65, AAND = 66, AXOR = 67, AOR = + 68, ALSH = 69, ARSH = 70, AUSH = 71, OCTAL = 72, HEX = 73, INTEGER = 74, DECIMAL = 75, STRING = 76, REGEX = 77, TRUE = 78, + FALSE = 79, NULL = 80, PRIMITIVE = 81, DEF = 82, ID = 83, DOTINTEGER = 84, DOTID = 85; + public static final int RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, RULE_rstatement = 4, + RULE_dstatement = 5, RULE_trailer = 6, RULE_block = 7, RULE_empty = 8, RULE_initializer = 9, RULE_afterthought = 10, + RULE_declaration = 11, RULE_decltype = 12, RULE_type = 13, RULE_declvar = 14, RULE_trap = 15, RULE_noncondexpression = 16, + RULE_expression = 17, RULE_unary = 18, RULE_unarynotaddsub = 19, RULE_castexpression = 20, RULE_primordefcasttype = 21, + RULE_refcasttype = 22, RULE_chain = 23, RULE_primary = 24, RULE_postfix = 25, RULE_postdot = 26, RULE_callinvoke = 27, + RULE_fieldaccess = 28, RULE_braceaccess = 29, RULE_arrayinitializer = 30, RULE_listinitializer = 31, RULE_mapinitializer = 32, + RULE_maptoken = 33, RULE_arguments = 34, RULE_argument = 35, RULE_lambda = 36, RULE_lamtype = 37, RULE_funcref = 38; + public static final String[] ruleNames = { + "source", + "function", + "parameters", + "statement", + "rstatement", + "dstatement", + "trailer", + "block", + "empty", + "initializer", + "afterthought", + "declaration", + "decltype", + "type", + "declvar", + "trap", + "noncondexpression", + "expression", + "unary", + "unarynotaddsub", + "castexpression", + "primordefcasttype", + "refcasttype", + "chain", + "primary", + "postfix", + "postdot", + "callinvoke", + "fieldaccess", + "braceaccess", + "arrayinitializer", + "listinitializer", + "mapinitializer", + "maptoken", + "arguments", + "argument", + "lambda", + "lamtype", + "funcref" }; - @Override + private static final String[] _LITERAL_NAMES = { + null, + null, + null, + "'{'", + "'}'", + "'['", + "']'", + "'('", + "')'", + "'.'", + "'?.'", + "','", + "';'", + "'if'", + "'in'", + "'else'", + "'while'", + "'do'", + "'for'", + "'continue'", + "'break'", + "'return'", + "'new'", + "'try'", + "'catch'", + "'throw'", + "'this'", + "'instanceof'", + "'!'", + 
"'~'", + "'*'", + "'/'", + "'%'", + "'+'", + "'-'", + "'<<'", + "'>>'", + "'>>>'", + "'<'", + "'<='", + "'>'", + "'>='", + "'=='", + "'==='", + "'!='", + "'!=='", + "'&'", + "'^'", + "'|'", + "'&&'", + "'||'", + "'?'", + "':'", + "'?:'", + "'::'", + "'->'", + "'=~'", + "'==~'", + "'++'", + "'--'", + "'='", + "'+='", + "'-='", + "'*='", + "'/='", + "'%='", + "'&='", + "'^='", + "'|='", + "'<<='", + "'>>='", + "'>>>='", + null, + null, + null, + null, + null, + null, + "'true'", + "'false'", + "'null'", + null, + "'def'" }; + private static final String[] _SYMBOLIC_NAMES = { + null, + "WS", + "COMMENT", + "LBRACK", + "RBRACK", + "LBRACE", + "RBRACE", + "LP", + "RP", + "DOT", + "NSDOT", + "COMMA", + "SEMICOLON", + "IF", + "IN", + "ELSE", + "WHILE", + "DO", + "FOR", + "CONTINUE", + "BREAK", + "RETURN", + "NEW", + "TRY", + "CATCH", + "THROW", + "THIS", + "INSTANCEOF", + "BOOLNOT", + "BWNOT", + "MUL", + "DIV", + "REM", + "ADD", + "SUB", + "LSH", + "RSH", + "USH", + "LT", + "LTE", + "GT", + "GTE", + "EQ", + "EQR", + "NE", + "NER", + "BWAND", + "XOR", + "BWOR", + "BOOLAND", + "BOOLOR", + "COND", + "COLON", + "ELVIS", + "REF", + "ARROW", + "FIND", + "MATCH", + "INCR", + "DECR", + "ASSIGN", + "AADD", + "ASUB", + "AMUL", + "ADIV", + "AREM", + "AAND", + "AXOR", + "AOR", + "ALSH", + "ARSH", + "AUSH", + "OCTAL", + "HEX", + "INTEGER", + "DECIMAL", + "STRING", + "REGEX", + "TRUE", + "FALSE", + "NULL", + "PRIMITIVE", + "DEF", + "ID", + "DOTINTEGER", + "DOTID" }; + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); - public Vocabulary getVocabulary() { - return VOCABULARY; - } + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } - @Override - public String getGrammarFileName() { return "PainlessParser.g4"; } - - @Override - public String[] getRuleNames() { return ruleNames; } - - @Override - public String getSerializedATN() { return _serializedATN; } - - @Override - public ATN getATN() { return _ATN; } - - public PainlessParser(TokenStream input) { - super(input); - _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); - } - public static class SourceContext extends ParserRuleContext { - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } - public List function() { - return getRuleContexts(FunctionContext.class); + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } } - public FunctionContext function(int i) { - return getRuleContext(FunctionContext.class,i); - } - public List statement() { - return getRuleContexts(StatementContext.class); - } - public StatementContext statement(int i) { - return getRuleContext(StatementContext.class,i); - } - public SourceContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_source; } + @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSource(this); - else return visitor.visitChildren(this); + @Deprecated + public String[] getTokenNames() { + return tokenNames; } - } - public final SourceContext source() throws RecognitionException { - SourceContext _localctx = new SourceContext(_ctx, 
getState()); - enterRule(_localctx, 0, RULE_source); - int _la; - try { - int _alt; - enterOuterAlt(_localctx, 1); - { - setState(81); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,0,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(78); - function(); - } - } - } - setState(83); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,0,_ctx); - } - setState(87); - _errHandler.sync(this); - _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { - { - { - setState(84); - statement(); - } - } - setState(89); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(90); - match(EOF); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class FunctionContext extends ParserRuleContext { - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public ParametersContext parameters() { - return getRuleContext(ParametersContext.class,0); - } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public FunctionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_function; } @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFunction(this); - else return visitor.visitChildren(this); - } - } - public final FunctionContext function() throws RecognitionException { - FunctionContext _localctx = new FunctionContext(_ctx, getState()); - enterRule(_localctx, 2, RULE_function); - try { - enterOuterAlt(_localctx, 1); - { - setState(92); - decltype(); - setState(93); - match(ID); - setState(94); - parameters(); - setState(95); - block(); - } + public Vocabulary getVocabulary() { + return VOCABULARY; } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class ParametersContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public List decltype() { - return getRuleContexts(DecltypeContext.class); - } - public DecltypeContext decltype(int i) { - return getRuleContext(DecltypeContext.class,i); - } - public List ID() { return getTokens(PainlessParser.ID); } - public TerminalNode ID(int i) { - return getToken(PainlessParser.ID, i); - } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public 
TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public ParametersContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_parameters; } @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitParameters(this); - else return visitor.visitChildren(this); + public String getGrammarFileName() { + return "PainlessParser.g4"; } - } - public final ParametersContext parameters() throws RecognitionException { - ParametersContext _localctx = new ParametersContext(_ctx, getState()); - enterRule(_localctx, 4, RULE_parameters); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(97); - match(LP); - setState(109); - _la = _input.LA(1); - if (((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (PRIMITIVE - 81)) | (1L << (DEF - 81)) | (1L << (ID - 81)))) != 0)) { - { - setState(98); - decltype(); - setState(99); - match(ID); - setState(106); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(100); - match(COMMA); - setState(101); - decltype(); - setState(102); - match(ID); - } - } - setState(108); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - } - - setState(111); - match(RP); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class StatementContext extends ParserRuleContext { - public RstatementContext rstatement() { - return getRuleContext(RstatementContext.class,0); - } - public DstatementContext dstatement() { - return getRuleContext(DstatementContext.class,0); - } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } - public StatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_statement; } @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitStatement(this); - else return visitor.visitChildren(this); + public String[] getRuleNames() { + return ruleNames; } - } - public final StatementContext statement() throws RecognitionException { - StatementContext _localctx = new StatementContext(_ctx, getState()); - enterRule(_localctx, 6, RULE_statement); - int _la; - try { - setState(117); - switch (_input.LA(1)) { - case IF: - case WHILE: - case FOR: - case TRY: - enterOuterAlt(_localctx, 1); - { - setState(113); - rstatement(); - } - break; - case LBRACE: - case LP: - case DO: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: - case PRIMITIVE: - case DEF: - case ID: - enterOuterAlt(_localctx, 2); - { - setState(114); - dstatement(); - setState(115); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - break; - default: - throw new NoViableAltException(this); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - 
_errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class RstatementContext extends ParserRuleContext { - public RstatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_rstatement; } - - public RstatementContext() { } - public void copyFrom(RstatementContext ctx) { - super.copyFrom(ctx); - } - } - public static class ForContext extends RstatementContext { - public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public List SEMICOLON() { return getTokens(PainlessParser.SEMICOLON); } - public TerminalNode SEMICOLON(int i) { - return getToken(PainlessParser.SEMICOLON, i); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public TrailerContext trailer() { - return getRuleContext(TrailerContext.class,0); - } - public EmptyContext empty() { - return getRuleContext(EmptyContext.class,0); - } - public InitializerContext initializer() { - return getRuleContext(InitializerContext.class,0); - } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public AfterthoughtContext afterthought() { - return getRuleContext(AfterthoughtContext.class,0); - } - public ForContext(RstatementContext ctx) { copyFrom(ctx); } @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFor(this); - else return visitor.visitChildren(this); + public String getSerializedATN() { + return _serializedATN; } - } - public static class TryContext extends RstatementContext { - public TerminalNode TRY() { return getToken(PainlessParser.TRY, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public List trap() { - return getRuleContexts(TrapContext.class); - } - public TrapContext trap(int i) { - return getRuleContext(TrapContext.class,i); - } - public TryContext(RstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTry(this); - else return visitor.visitChildren(this); - } - } - public static class WhileContext extends RstatementContext { - public TerminalNode WHILE() { return getToken(PainlessParser.WHILE, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public TrailerContext trailer() { - return getRuleContext(TrailerContext.class,0); - } - public EmptyContext empty() { - return getRuleContext(EmptyContext.class,0); - } - public WhileContext(RstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitWhile(this); - else return visitor.visitChildren(this); - } - } - public static class IneachContext extends RstatementContext { - public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public TerminalNode IN() { return getToken(PainlessParser.IN, 0); } - public ExpressionContext 
expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public TrailerContext trailer() { - return getRuleContext(TrailerContext.class,0); - } - public IneachContext(RstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitIneach(this); - else return visitor.visitChildren(this); - } - } - public static class IfContext extends RstatementContext { - public TerminalNode IF() { return getToken(PainlessParser.IF, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public List trailer() { - return getRuleContexts(TrailerContext.class); - } - public TrailerContext trailer(int i) { - return getRuleContext(TrailerContext.class,i); - } - public TerminalNode ELSE() { return getToken(PainlessParser.ELSE, 0); } - public IfContext(RstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitIf(this); - else return visitor.visitChildren(this); - } - } - public static class EachContext extends RstatementContext { - public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public TrailerContext trailer() { - return getRuleContext(TrailerContext.class,0); - } - public EachContext(RstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitEach(this); - else return visitor.visitChildren(this); - } - } - public final RstatementContext rstatement() throws RecognitionException { - RstatementContext _localctx = new RstatementContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_rstatement); - int _la; - try { - int _alt; - setState(179); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { - case 1: - _localctx = new IfContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(119); - match(IF); - setState(120); - match(LP); - setState(121); - expression(); - setState(122); - match(RP); - setState(123); - trailer(); - setState(127); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { - case 1: - { - setState(124); - match(ELSE); - setState(125); - trailer(); - } - break; - case 2: - { - setState(126); - if (!( _input.LA(1) != ELSE )) throw new FailedPredicateException(this, " _input.LA(1) != ELSE "); - } - break; - } - } - break; - case 2: - _localctx = new WhileContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(129); - match(WHILE); - setState(130); - match(LP); - setState(131); - expression(); - setState(132); - match(RP); - setState(135); - switch 
(_input.LA(1)) { - case LBRACK: - case LBRACE: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: - case PRIMITIVE: - case DEF: - case ID: - { - setState(133); - trailer(); - } - break; - case SEMICOLON: - { - setState(134); - empty(); - } - break; - default: - throw new NoViableAltException(this); - } - } - break; - case 3: - _localctx = new ForContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(137); - match(FOR); - setState(138); - match(LP); - setState(140); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(139); - initializer(); - } + @Override + public ATN getATN() { + return _ATN; + } + + public PainlessParser(TokenStream input) { + super(input); + _interp = new ParserATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache); + } + + public static class SourceContext extends ParserRuleContext { + public TerminalNode EOF() { + return getToken(PainlessParser.EOF, 0); } - setState(142); - match(SEMICOLON); - setState(144); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(143); - expression(); - } + public List function() { + return getRuleContexts(FunctionContext.class); } - setState(146); - match(SEMICOLON); - setState(148); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(147); - afterthought(); - } + public FunctionContext function(int i) { + return getRuleContext(FunctionContext.class, i); } - setState(150); - match(RP); - setState(153); - switch (_input.LA(1)) { - case LBRACK: - case LBRACE: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: 
- case PRIMITIVE: - case DEF: - case ID: - { - setState(151); - trailer(); - } - break; - case SEMICOLON: - { - setState(152); - empty(); - } - break; - default: - throw new NoViableAltException(this); + public List statement() { + return getRuleContexts(StatementContext.class); } + + public StatementContext statement(int i) { + return getRuleContext(StatementContext.class, i); } - break; - case 4: - _localctx = new EachContext(_localctx); - enterOuterAlt(_localctx, 4); - { - setState(155); - match(FOR); - setState(156); - match(LP); - setState(157); - decltype(); - setState(158); - match(ID); - setState(159); - match(COLON); - setState(160); - expression(); - setState(161); - match(RP); - setState(162); - trailer(); + + public SourceContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); } - break; - case 5: - _localctx = new IneachContext(_localctx); - enterOuterAlt(_localctx, 5); - { - setState(164); - match(FOR); - setState(165); - match(LP); - setState(166); - match(ID); - setState(167); - match(IN); - setState(168); - expression(); - setState(169); - match(RP); - setState(170); - trailer(); + + @Override + public int getRuleIndex() { + return RULE_source; } - break; - case 6: - _localctx = new TryContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(172); - match(TRY); - setState(173); - block(); - setState(175); - _errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitSource(this); + else return visitor.visitChildren(this); + } + } + + public final SourceContext source() throws RecognitionException { + SourceContext _localctx = new SourceContext(_ctx, getState()); + enterRule(_localctx, 0, RULE_source); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); { + setState(81); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 0, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(78); + function(); + } + } + } + setState(83); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 0, _ctx); + } + setState(87); + _errHandler.sync(this); + _la = _input.LA(1); + while ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L + << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L + << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { + { + { + setState(84); + statement(); + } + } + setState(89); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(90); + match(EOF); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class FunctionContext extends ParserRuleContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public TerminalNode ID() 
{ + return getToken(PainlessParser.ID, 0); + } + + public ParametersContext parameters() { + return getRuleContext(ParametersContext.class, 0); + } + + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } + + public FunctionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_function; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitFunction(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionContext function() throws RecognitionException { + FunctionContext _localctx = new FunctionContext(_ctx, getState()); + enterRule(_localctx, 2, RULE_function); + try { + enterOuterAlt(_localctx, 1); { - setState(174); - trap(); + setState(92); + decltype(); + setState(93); + match(ID); + setState(94); + parameters(); + setState(95); + block(); } - } - break; - default: - throw new NoViableAltException(this); - } - setState(177); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,11,_ctx); - } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); } - break; - } + return _localctx; } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class DstatementContext extends ParserRuleContext { - public DstatementContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_dstatement; } - - public DstatementContext() { } - public void copyFrom(DstatementContext ctx) { - super.copyFrom(ctx); - } - } - public static class DeclContext extends DstatementContext { - public DeclarationContext declaration() { - return getRuleContext(DeclarationContext.class,0); - } - public DeclContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDecl(this); - else return visitor.visitChildren(this); - } - } - public static class BreakContext extends DstatementContext { - public TerminalNode BREAK() { return getToken(PainlessParser.BREAK, 0); } - public BreakContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBreak(this); - else return visitor.visitChildren(this); - } - } - public static class ThrowContext extends DstatementContext { - public TerminalNode THROW() { return getToken(PainlessParser.THROW, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public ThrowContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitThrow(this); - else return visitor.visitChildren(this); - } - } - public static class ContinueContext extends DstatementContext { - public TerminalNode CONTINUE() { return getToken(PainlessParser.CONTINUE, 0); } - public 
ContinueContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitContinue(this); - else return visitor.visitChildren(this); - } - } - public static class ExprContext extends DstatementContext { - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public ExprContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExpr(this); - else return visitor.visitChildren(this); - } - } - public static class DoContext extends DstatementContext { - public TerminalNode DO() { return getToken(PainlessParser.DO, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public TerminalNode WHILE() { return getToken(PainlessParser.WHILE, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public DoContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDo(this); - else return visitor.visitChildren(this); - } - } - public static class ReturnContext extends DstatementContext { - public TerminalNode RETURN() { return getToken(PainlessParser.RETURN, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public ReturnContext(DstatementContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitReturn(this); - else return visitor.visitChildren(this); - } - } - - public final DstatementContext dstatement() throws RecognitionException { - DstatementContext _localctx = new DstatementContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_dstatement); - int _la; - try { - setState(198); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { - case 1: - _localctx = new DoContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(181); - match(DO); - setState(182); - block(); - setState(183); - match(WHILE); - setState(184); - match(LP); - setState(185); - expression(); - setState(186); - match(RP); - } - break; - case 2: - _localctx = new DeclContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(188); - declaration(); - } - break; - case 3: - _localctx = new ContinueContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(189); - match(CONTINUE); - } - break; - case 4: - _localctx = new BreakContext(_localctx); - enterOuterAlt(_localctx, 4); - { - setState(190); - match(BREAK); - } - break; - case 5: - _localctx = new ReturnContext(_localctx); - enterOuterAlt(_localctx, 5); - { - setState(191); - match(RETURN); - setState(193); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) 
| (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(192); - expression(); - } + public static class ParametersContext extends ParserRuleContext { + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); } + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); } - break; - case 6: - _localctx = new ThrowContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(195); - match(THROW); - setState(196); - expression(); + + public List decltype() { + return getRuleContexts(DecltypeContext.class); } - break; - case 7: - _localctx = new ExprContext(_localctx); - enterOuterAlt(_localctx, 7); - { - setState(197); - expression(); + + public DecltypeContext decltype(int i) { + return getRuleContext(DecltypeContext.class, i); } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class TrailerContext extends ParserRuleContext { - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public StatementContext statement() { - return getRuleContext(StatementContext.class,0); - } - public TrailerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_trailer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrailer(this); - else return visitor.visitChildren(this); - } - } - - public final TrailerContext trailer() throws RecognitionException { - TrailerContext _localctx = new TrailerContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_trailer); - try { - setState(202); - switch (_input.LA(1)) { - case LBRACK: - enterOuterAlt(_localctx, 1); - { - setState(200); - block(); + public List ID() { + return getTokens(PainlessParser.ID); } - break; - case LBRACE: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: - case PRIMITIVE: - case DEF: - case ID: - enterOuterAlt(_localctx, 2); - { - setState(201); - statement(); + + public TerminalNode ID(int i) { + return getToken(PainlessParser.ID, i); } - break; - default: - throw new NoViableAltException(this); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class BlockContext extends ParserRuleContext { - public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } - public List statement() { - return getRuleContexts(StatementContext.class); - } - public StatementContext statement(int i) { - return getRuleContext(StatementContext.class,i); - } - public DstatementContext dstatement() { - return getRuleContext(DstatementContext.class,0); - } - public BlockContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return 
RULE_block; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBlock(this); - else return visitor.visitChildren(this); - } - } - - public final BlockContext block() throws RecognitionException { - BlockContext _localctx = new BlockContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_block); - int _la; - try { - int _alt; - enterOuterAlt(_localctx, 1); - { - setState(204); - match(LBRACK); - setState(208); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,16,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(205); - statement(); - } - } + public List COMMA() { + return getTokens(PainlessParser.COMMA); } - setState(210); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,16,_ctx); - } - setState(212); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << DO) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(211); - dstatement(); + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); } - } - setState(214); - match(RBRACK); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class EmptyContext extends ParserRuleContext { - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public EmptyContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_empty; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitEmpty(this); - else return visitor.visitChildren(this); - } - } - - public final EmptyContext empty() throws RecognitionException { - EmptyContext _localctx = new EmptyContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_empty); - try { - enterOuterAlt(_localctx, 1); - { - setState(216); - match(SEMICOLON); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class InitializerContext extends ParserRuleContext { - public DeclarationContext declaration() { - return getRuleContext(DeclarationContext.class,0); - } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public InitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_initializer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitInitializer(this); - else return 
visitor.visitChildren(this); - } - } - - public final InitializerContext initializer() throws RecognitionException { - InitializerContext _localctx = new InitializerContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_initializer); - try { - setState(220); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(218); - declaration(); + public ParametersContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(219); - expression(); + + @Override + public int getRuleIndex() { + return RULE_parameters; } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class AfterthoughtContext extends ParserRuleContext { - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public AfterthoughtContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_afterthought; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitAfterthought(this); - else return visitor.visitChildren(this); - } - } - - public final AfterthoughtContext afterthought() throws RecognitionException { - AfterthoughtContext _localctx = new AfterthoughtContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_afterthought); - try { - enterOuterAlt(_localctx, 1); - { - setState(222); - expression(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class DeclarationContext extends ParserRuleContext { - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public List declvar() { - return getRuleContexts(DeclvarContext.class); - } - public DeclvarContext declvar(int i) { - return getRuleContext(DeclvarContext.class,i); - } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public DeclarationContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_declaration; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDeclaration(this); - else return visitor.visitChildren(this); - } - } - - public final DeclarationContext declaration() throws RecognitionException { - DeclarationContext _localctx = new DeclarationContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_declaration); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(224); - decltype(); - setState(225); - declvar(); - setState(230); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(226); - match(COMMA); - setState(227); - declvar(); + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitParameters(this); + else return visitor.visitChildren(this); } - } - 
setState(232); - _errHandler.sync(this); - _la = _input.LA(1); - } - } } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class DecltypeContext extends ParserRuleContext { - public TypeContext type() { - return getRuleContext(TypeContext.class,0); - } - public List LBRACE() { return getTokens(PainlessParser.LBRACE); } - public TerminalNode LBRACE(int i) { - return getToken(PainlessParser.LBRACE, i); - } - public List RBRACE() { return getTokens(PainlessParser.RBRACE); } - public TerminalNode RBRACE(int i) { - return getToken(PainlessParser.RBRACE, i); - } - public DecltypeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_decltype; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDecltype(this); - else return visitor.visitChildren(this); - } - } - - public final DecltypeContext decltype() throws RecognitionException { - DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_decltype); - try { - int _alt; - enterOuterAlt(_localctx, 1); - { - setState(233); - type(); - setState(238); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(234); - match(LBRACE); - setState(235); - match(RBRACE); - } - } - } - setState(240); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,20,_ctx); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class TypeContext extends ParserRuleContext { - public TerminalNode DEF() { return getToken(PainlessParser.DEF, 0); } - public TerminalNode PRIMITIVE() { return getToken(PainlessParser.PRIMITIVE, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public List DOT() { return getTokens(PainlessParser.DOT); } - public TerminalNode DOT(int i) { - return getToken(PainlessParser.DOT, i); - } - public List DOTID() { return getTokens(PainlessParser.DOTID); } - public TerminalNode DOTID(int i) { - return getToken(PainlessParser.DOTID, i); - } - public TypeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_type; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitType(this); - else return visitor.visitChildren(this); - } - } - - public final TypeContext type() throws RecognitionException { - TypeContext _localctx = new TypeContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_type); - try { - int _alt; - setState(251); - switch (_input.LA(1)) { - case DEF: - enterOuterAlt(_localctx, 1); - { - setState(241); - match(DEF); - } - break; - case PRIMITIVE: - enterOuterAlt(_localctx, 2); - { - setState(242); - match(PRIMITIVE); - } - break; - case ID: - enterOuterAlt(_localctx, 3); - { - setState(243); - match(ID); - setState(248); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); - while ( 
_alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { + public final ParametersContext parameters() throws RecognitionException { + ParametersContext _localctx = new ParametersContext(_ctx, getState()); + enterRule(_localctx, 4, RULE_parameters); + int _la; + try { + enterOuterAlt(_localctx, 1); { - { - setState(244); - match(DOT); - setState(245); - match(DOTID); + setState(97); + match(LP); + setState(109); + _la = _input.LA(1); + if (((((_la - 81)) & ~0x3f) == 0 + && ((1L << (_la - 81)) & ((1L << (PRIMITIVE - 81)) | (1L << (DEF - 81)) | (1L << (ID - 81)))) != 0)) { + { + setState(98); + decltype(); + setState(99); + match(ID); + setState(106); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(100); + match(COMMA); + setState(101); + decltype(); + setState(102); + match(ID); + } + } + setState(108); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(111); + match(RP); } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class StatementContext extends ParserRuleContext { + public RstatementContext rstatement() { + return getRuleContext(RstatementContext.class, 0); + } + + public DstatementContext dstatement() { + return getRuleContext(DstatementContext.class, 0); + } + + public TerminalNode SEMICOLON() { + return getToken(PainlessParser.SEMICOLON, 0); + } + + public TerminalNode EOF() { + return getToken(PainlessParser.EOF, 0); + } + + public StatementContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_statement; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitStatement(this); + else return visitor.visitChildren(this); + } + } + + public final StatementContext statement() throws RecognitionException { + StatementContext _localctx = new StatementContext(_ctx, getState()); + enterRule(_localctx, 6, RULE_statement); + int _la; + try { + setState(117); + switch (_input.LA(1)) { + case IF: + case WHILE: + case FOR: + case TRY: + enterOuterAlt(_localctx, 1); { + setState(113); + rstatement(); + } + break; + case LBRACE: + case LP: + case DO: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case PRIMITIVE: + case DEF: + case ID: + enterOuterAlt(_localctx, 2); { + setState(114); + dstatement(); + setState(115); + _la = _input.LA(1); + if (!(_la == EOF || _la == SEMICOLON)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + break; + default: + throw new NoViableAltException(this); } - } - setState(250); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); } - } - break; - default: - throw new NoViableAltException(this); - } + return _localctx; } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - 
exitRule(); - } - return _localctx; - } - public static class DeclvarContext extends ParserRuleContext { - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public DeclvarContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_declvar; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDeclvar(this); - else return visitor.visitChildren(this); - } - } + public static class RstatementContext extends ParserRuleContext { + public RstatementContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } - public final DeclvarContext declvar() throws RecognitionException { - DeclvarContext _localctx = new DeclvarContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_declvar); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(253); - match(ID); - setState(256); - _la = _input.LA(1); - if (_la==ASSIGN) { - { - setState(254); - match(ASSIGN); - setState(255); - expression(); + @Override + public int getRuleIndex() { + return RULE_rstatement; } - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } + public RstatementContext() {} - public static class TrapContext extends ParserRuleContext { - public TerminalNode CATCH() { return getToken(PainlessParser.CATCH, 0); } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TypeContext type() { - return getRuleContext(TypeContext.class,0); + public void copyFrom(RstatementContext ctx) { + super.copyFrom(ctx); + } } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public TrapContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_trap; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrap(this); - else return visitor.visitChildren(this); - } - } - public final TrapContext trap() throws RecognitionException { - TrapContext _localctx = new TrapContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_trap); - try { - enterOuterAlt(_localctx, 1); - { - setState(258); - match(CATCH); - setState(259); - match(LP); - setState(260); - type(); - setState(261); - match(ID); - setState(262); - match(RP); - setState(263); - block(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } + public static class ForContext extends RstatementContext { + public TerminalNode FOR() { + return getToken(PainlessParser.FOR, 0); + } - public static class NoncondexpressionContext extends ParserRuleContext { - public NoncondexpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return 
RULE_noncondexpression; } + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } - public NoncondexpressionContext() { } - public void copyFrom(NoncondexpressionContext ctx) { - super.copyFrom(ctx); - } - } - public static class SingleContext extends NoncondexpressionContext { - public UnaryContext unary() { - return getRuleContext(UnaryContext.class,0); - } - public SingleContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSingle(this); - else return visitor.visitChildren(this); - } - } - public static class CompContext extends NoncondexpressionContext { - public List noncondexpression() { - return getRuleContexts(NoncondexpressionContext.class); - } - public NoncondexpressionContext noncondexpression(int i) { - return getRuleContext(NoncondexpressionContext.class,i); - } - public TerminalNode LT() { return getToken(PainlessParser.LT, 0); } - public TerminalNode LTE() { return getToken(PainlessParser.LTE, 0); } - public TerminalNode GT() { return getToken(PainlessParser.GT, 0); } - public TerminalNode GTE() { return getToken(PainlessParser.GTE, 0); } - public TerminalNode EQ() { return getToken(PainlessParser.EQ, 0); } - public TerminalNode EQR() { return getToken(PainlessParser.EQR, 0); } - public TerminalNode NE() { return getToken(PainlessParser.NE, 0); } - public TerminalNode NER() { return getToken(PainlessParser.NER, 0); } - public CompContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitComp(this); - else return visitor.visitChildren(this); - } - } - public static class BoolContext extends NoncondexpressionContext { - public List noncondexpression() { - return getRuleContexts(NoncondexpressionContext.class); - } - public NoncondexpressionContext noncondexpression(int i) { - return getRuleContext(NoncondexpressionContext.class,i); - } - public TerminalNode BOOLAND() { return getToken(PainlessParser.BOOLAND, 0); } - public TerminalNode BOOLOR() { return getToken(PainlessParser.BOOLOR, 0); } - public BoolContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBool(this); - else return visitor.visitChildren(this); - } - } - public static class BinaryContext extends NoncondexpressionContext { - public List noncondexpression() { - return getRuleContexts(NoncondexpressionContext.class); - } - public NoncondexpressionContext noncondexpression(int i) { - return getRuleContext(NoncondexpressionContext.class,i); - } - public TerminalNode MUL() { return getToken(PainlessParser.MUL, 0); } - public TerminalNode DIV() { return getToken(PainlessParser.DIV, 0); } - public TerminalNode REM() { return getToken(PainlessParser.REM, 0); } - public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } - public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } - public TerminalNode FIND() { return getToken(PainlessParser.FIND, 0); } - public TerminalNode MATCH() { return getToken(PainlessParser.MATCH, 0); } - public TerminalNode LSH() { return getToken(PainlessParser.LSH, 0); } - public TerminalNode RSH() { return getToken(PainlessParser.RSH, 0); } - public TerminalNode USH() { return 
getToken(PainlessParser.USH, 0); } - public TerminalNode BWAND() { return getToken(PainlessParser.BWAND, 0); } - public TerminalNode XOR() { return getToken(PainlessParser.XOR, 0); } - public TerminalNode BWOR() { return getToken(PainlessParser.BWOR, 0); } - public BinaryContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBinary(this); - else return visitor.visitChildren(this); - } - } - public static class ElvisContext extends NoncondexpressionContext { - public List noncondexpression() { - return getRuleContexts(NoncondexpressionContext.class); - } - public NoncondexpressionContext noncondexpression(int i) { - return getRuleContext(NoncondexpressionContext.class,i); - } - public TerminalNode ELVIS() { return getToken(PainlessParser.ELVIS, 0); } - public ElvisContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitElvis(this); - else return visitor.visitChildren(this); - } - } - public static class InstanceofContext extends NoncondexpressionContext { - public NoncondexpressionContext noncondexpression() { - return getRuleContext(NoncondexpressionContext.class,0); - } - public TerminalNode INSTANCEOF() { return getToken(PainlessParser.INSTANCEOF, 0); } - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public InstanceofContext(NoncondexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitInstanceof(this); - else return visitor.visitChildren(this); - } - } + public List SEMICOLON() { + return getTokens(PainlessParser.SEMICOLON); + } - public final NoncondexpressionContext noncondexpression() throws RecognitionException { - return noncondexpression(0); - } + public TerminalNode SEMICOLON(int i) { + return getToken(PainlessParser.SEMICOLON, i); + } - private NoncondexpressionContext noncondexpression(int _p) throws RecognitionException { - ParserRuleContext _parentctx = _ctx; - int _parentState = getState(); - NoncondexpressionContext _localctx = new NoncondexpressionContext(_ctx, _parentState); - NoncondexpressionContext _prevctx = _localctx; - int _startState = 32; - enterRecursionRule(_localctx, 32, RULE_noncondexpression, _p); - int _la; - try { - int _alt; - enterOuterAlt(_localctx, 1); - { - { - _localctx = new SingleContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } - setState(266); - unary(); - } - _ctx.stop = _input.LT(-1); - setState(309); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,25,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - if ( _parseListeners!=null ) triggerExitRuleEvent(); - _prevctx = _localctx; - { - setState(307); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { - case 1: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(268); - if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); - setState(269); - _la = 
_input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(270); - noncondexpression(14); - } - break; - case 2: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(271); - if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(272); - _la = _input.LA(1); - if ( !(_la==ADD || _la==SUB) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(273); - noncondexpression(13); - } - break; - case 3: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(274); - if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(275); - _la = _input.LA(1); - if ( !(_la==FIND || _la==MATCH) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(276); - noncondexpression(12); - } - break; - case 4: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(277); - if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(278); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(279); - noncondexpression(11); - } - break; - case 5: - { - _localctx = new CompContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(280); - if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(281); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(282); - noncondexpression(10); - } - break; - case 6: - { - _localctx = new CompContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(283); - if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(284); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(285); - noncondexpression(8); - } - break; - case 7: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(286); - if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(287); - match(BWAND); - setState(288); - noncondexpression(7); - } - break; - case 8: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(289); - if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(290); - match(XOR); - 
setState(291); - noncondexpression(6); - } - break; - case 9: - { - _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(292); - if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(293); - match(BWOR); - setState(294); - noncondexpression(5); - } - break; - case 10: - { - _localctx = new BoolContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(295); - if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(296); - match(BOOLAND); - setState(297); - noncondexpression(4); - } - break; - case 11: - { - _localctx = new BoolContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(298); - if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(299); - match(BOOLOR); - setState(300); - noncondexpression(3); - } - break; - case 12: - { - _localctx = new ElvisContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(301); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(302); - match(ELVIS); - setState(303); - noncondexpression(1); - } - break; - case 13: - { - _localctx = new InstanceofContext(new NoncondexpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); - setState(304); - if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(305); - match(INSTANCEOF); - setState(306); - decltype(); - } - break; - } - } + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class, 0); } - setState(311); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,25,_ctx); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - unrollRecursionContexts(_parentctx); - } - return _localctx; - } - public static class ExpressionContext extends ParserRuleContext { - public ExpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_expression; } + public EmptyContext empty() { + return getRuleContext(EmptyContext.class, 0); + } - public ExpressionContext() { } - public void copyFrom(ExpressionContext ctx) { - super.copyFrom(ctx); - } - } - public static class ConditionalContext extends ExpressionContext { - public NoncondexpressionContext noncondexpression() { - return getRuleContext(NoncondexpressionContext.class,0); - } - public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public ConditionalContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitConditional(this); - 
else return visitor.visitChildren(this); - } - } - public static class AssignmentContext extends ExpressionContext { - public NoncondexpressionContext noncondexpression() { - return getRuleContext(NoncondexpressionContext.class,0); - } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } - public TerminalNode AADD() { return getToken(PainlessParser.AADD, 0); } - public TerminalNode ASUB() { return getToken(PainlessParser.ASUB, 0); } - public TerminalNode AMUL() { return getToken(PainlessParser.AMUL, 0); } - public TerminalNode ADIV() { return getToken(PainlessParser.ADIV, 0); } - public TerminalNode AREM() { return getToken(PainlessParser.AREM, 0); } - public TerminalNode AAND() { return getToken(PainlessParser.AAND, 0); } - public TerminalNode AXOR() { return getToken(PainlessParser.AXOR, 0); } - public TerminalNode AOR() { return getToken(PainlessParser.AOR, 0); } - public TerminalNode ALSH() { return getToken(PainlessParser.ALSH, 0); } - public TerminalNode ARSH() { return getToken(PainlessParser.ARSH, 0); } - public TerminalNode AUSH() { return getToken(PainlessParser.AUSH, 0); } - public AssignmentContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitAssignment(this); - else return visitor.visitChildren(this); - } - } - public static class NonconditionalContext extends ExpressionContext { - public NoncondexpressionContext noncondexpression() { - return getRuleContext(NoncondexpressionContext.class,0); - } - public NonconditionalContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNonconditional(this); - else return visitor.visitChildren(this); - } - } + public InitializerContext initializer() { + return getRuleContext(InitializerContext.class, 0); + } - public final ExpressionContext expression() throws RecognitionException { - ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_expression); - int _la; - try { - setState(323); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { - case 1: - _localctx = new NonconditionalContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(312); - noncondexpression(0); + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); } - break; - case 2: - _localctx = new ConditionalContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(313); - noncondexpression(0); - setState(314); - match(COND); - setState(315); - expression(); - setState(316); - match(COLON); - setState(317); - expression(); - } - break; - case 3: - _localctx = new AssignmentContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(319); - noncondexpression(0); - setState(320); - _la = _input.LA(1); - if ( !(((((_la - 60)) & ~0x3f) == 0 && ((1L << (_la - 60)) & ((1L << (ASSIGN - 60)) | (1L << (AADD - 60)) | (1L << (ASUB - 60)) | (1L << (AMUL - 60)) | (1L << (ADIV - 60)) | (1L << (AREM - 60)) | (1L << (AAND - 60)) | (1L << (AXOR - 60)) | (1L << (AOR - 60)) | (1L << (ALSH - 60)) | (1L << (ARSH - 60)) | (1L << (AUSH - 60)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(321); - 
expression(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class UnaryContext extends ParserRuleContext { - public UnaryContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_unary; } + public AfterthoughtContext afterthought() { + return getRuleContext(AfterthoughtContext.class, 0); + } - public UnaryContext() { } - public void copyFrom(UnaryContext ctx) { - super.copyFrom(ctx); - } - } - public static class NotaddsubContext extends UnaryContext { - public UnarynotaddsubContext unarynotaddsub() { - return getRuleContext(UnarynotaddsubContext.class,0); - } - public NotaddsubContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNotaddsub(this); - else return visitor.visitChildren(this); - } - } - public static class PreContext extends UnaryContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); - } - public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } - public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } - public PreContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPre(this); - else return visitor.visitChildren(this); - } - } - public static class AddsubContext extends UnaryContext { - public UnaryContext unary() { - return getRuleContext(UnaryContext.class,0); - } - public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } - public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } - public AddsubContext(UnaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitAddsub(this); - else return visitor.visitChildren(this); - } - } + public ForContext(RstatementContext ctx) { + copyFrom(ctx); + } - public final UnaryContext unary() throws RecognitionException { - UnaryContext _localctx = new UnaryContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_unary); - int _la; - try { - setState(330); - switch (_input.LA(1)) { - case INCR: - case DECR: - _localctx = new PreContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(325); - _la = _input.LA(1); - if ( !(_la==INCR || _la==DECR) ) { - _errHandler.recoverInline(this); - } else { - consume(); + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitFor(this); + else return visitor.visitChildren(this); } - setState(326); - chain(); - } - break; - case ADD: - case SUB: - _localctx = new AddsubContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(327); - _la = _input.LA(1); - if ( !(_la==ADD || _la==SUB) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(328); - unary(); - } - break; - case LBRACE: - case LP: - case NEW: - case BOOLNOT: - case BWNOT: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: - case ID: - _localctx = new 
NotaddsubContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(329); - unarynotaddsub(); - } - break; - default: - throw new NoViableAltException(this); - } } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class UnarynotaddsubContext extends ParserRuleContext { - public UnarynotaddsubContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_unarynotaddsub; } + public static class TryContext extends RstatementContext { + public TerminalNode TRY() { + return getToken(PainlessParser.TRY, 0); + } - public UnarynotaddsubContext() { } - public void copyFrom(UnarynotaddsubContext ctx) { - super.copyFrom(ctx); - } - } - public static class CastContext extends UnarynotaddsubContext { - public CastexpressionContext castexpression() { - return getRuleContext(CastexpressionContext.class,0); - } - public CastContext(UnarynotaddsubContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCast(this); - else return visitor.visitChildren(this); - } - } - public static class NotContext extends UnarynotaddsubContext { - public UnaryContext unary() { - return getRuleContext(UnaryContext.class,0); - } - public TerminalNode BOOLNOT() { return getToken(PainlessParser.BOOLNOT, 0); } - public TerminalNode BWNOT() { return getToken(PainlessParser.BWNOT, 0); } - public NotContext(UnarynotaddsubContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNot(this); - else return visitor.visitChildren(this); - } - } - public static class ReadContext extends UnarynotaddsubContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); - } - public ReadContext(UnarynotaddsubContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRead(this); - else return visitor.visitChildren(this); - } - } - public static class PostContext extends UnarynotaddsubContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); - } - public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } - public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } - public PostContext(UnarynotaddsubContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPost(this); - else return visitor.visitChildren(this); - } - } + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } - public final UnarynotaddsubContext unarynotaddsub() throws RecognitionException { - UnarynotaddsubContext _localctx = new UnarynotaddsubContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_unarynotaddsub); - int _la; - try { - setState(339); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { - case 1: - _localctx = new ReadContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(332); - chain(); + public List trap() { + return getRuleContexts(TrapContext.class); } - break; - 
case 2: - _localctx = new PostContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(333); - chain(); - setState(334); - _la = _input.LA(1); - if ( !(_la==INCR || _la==DECR) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - break; - case 3: - _localctx = new NotContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(336); - _la = _input.LA(1); - if ( !(_la==BOOLNOT || _la==BWNOT) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(337); - unary(); - } - break; - case 4: - _localctx = new CastContext(_localctx); - enterOuterAlt(_localctx, 4); - { - setState(338); - castexpression(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class CastexpressionContext extends ParserRuleContext { - public CastexpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_castexpression; } + public TrapContext trap(int i) { + return getRuleContext(TrapContext.class, i); + } - public CastexpressionContext() { } - public void copyFrom(CastexpressionContext ctx) { - super.copyFrom(ctx); - } - } - public static class RefcastContext extends CastexpressionContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public RefcasttypeContext refcasttype() { - return getRuleContext(RefcasttypeContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public UnarynotaddsubContext unarynotaddsub() { - return getRuleContext(UnarynotaddsubContext.class,0); - } - public RefcastContext(CastexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRefcast(this); - else return visitor.visitChildren(this); - } - } - public static class PrimordefcastContext extends CastexpressionContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public PrimordefcasttypeContext primordefcasttype() { - return getRuleContext(PrimordefcasttypeContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public UnaryContext unary() { - return getRuleContext(UnaryContext.class,0); - } - public PrimordefcastContext(CastexpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPrimordefcast(this); - else return visitor.visitChildren(this); - } - } + public TryContext(RstatementContext ctx) { + copyFrom(ctx); + } - public final CastexpressionContext castexpression() throws RecognitionException { - CastexpressionContext _localctx = new CastexpressionContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_castexpression); - try { - setState(351); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { - case 1: - _localctx = new PrimordefcastContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(341); - match(LP); - setState(342); - primordefcasttype(); - setState(343); - match(RP); - setState(344); - unary(); + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitTry(this); + else 
return visitor.visitChildren(this); } - break; - case 2: - _localctx = new RefcastContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(346); - match(LP); - setState(347); - refcasttype(); - setState(348); - match(RP); - setState(349); - unarynotaddsub(); - } - break; - } } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class PrimordefcasttypeContext extends ParserRuleContext { - public TerminalNode DEF() { return getToken(PainlessParser.DEF, 0); } - public TerminalNode PRIMITIVE() { return getToken(PainlessParser.PRIMITIVE, 0); } - public PrimordefcasttypeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_primordefcasttype; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPrimordefcasttype(this); - else return visitor.visitChildren(this); - } - } + public static class WhileContext extends RstatementContext { + public TerminalNode WHILE() { + return getToken(PainlessParser.WHILE, 0); + } - public final PrimordefcasttypeContext primordefcasttype() throws RecognitionException { - PrimordefcasttypeContext _localctx = new PrimordefcasttypeContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_primordefcasttype); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(353); - _la = _input.LA(1); - if ( !(_la==PRIMITIVE || _la==DEF) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } - public static class RefcasttypeContext extends ParserRuleContext { - public TerminalNode DEF() { return getToken(PainlessParser.DEF, 0); } - public List LBRACE() { return getTokens(PainlessParser.LBRACE); } - public TerminalNode LBRACE(int i) { - return getToken(PainlessParser.LBRACE, i); - } - public List RBRACE() { return getTokens(PainlessParser.RBRACE); } - public TerminalNode RBRACE(int i) { - return getToken(PainlessParser.RBRACE, i); - } - public TerminalNode PRIMITIVE() { return getToken(PainlessParser.PRIMITIVE, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public List DOT() { return getTokens(PainlessParser.DOT); } - public TerminalNode DOT(int i) { - return getToken(PainlessParser.DOT, i); - } - public List DOTID() { return getTokens(PainlessParser.DOTID); } - public TerminalNode DOTID(int i) { - return getToken(PainlessParser.DOTID, i); - } - public RefcasttypeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_refcasttype; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRefcasttype(this); - else return visitor.visitChildren(this); - } - } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } - public final RefcasttypeContext refcasttype() throws RecognitionException { - RefcasttypeContext _localctx = new RefcasttypeContext(_ctx, getState()); - enterRule(_localctx, 44, 
RULE_refcasttype); - int _la; - try { - setState(384); - switch (_input.LA(1)) { - case DEF: - enterOuterAlt(_localctx, 1); - { - setState(355); - match(DEF); - setState(358); - _errHandler.sync(this); - _la = _input.LA(1); - do { - { - { - setState(356); - match(LBRACE); - setState(357); - match(RBRACE); - } - } - setState(360); - _errHandler.sync(this); - _la = _input.LA(1); - } while ( _la==LBRACE ); + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); } - break; - case PRIMITIVE: - enterOuterAlt(_localctx, 2); - { - setState(362); - match(PRIMITIVE); - setState(365); - _errHandler.sync(this); - _la = _input.LA(1); - do { - { - { - setState(363); - match(LBRACE); - setState(364); - match(RBRACE); - } - } - setState(367); - _errHandler.sync(this); - _la = _input.LA(1); - } while ( _la==LBRACE ); - } - break; - case ID: - enterOuterAlt(_localctx, 3); - { - setState(369); - match(ID); - setState(374); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==DOT) { - { - { - setState(370); - match(DOT); - setState(371); - match(DOTID); - } - } - setState(376); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(381); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==LBRACE) { - { - { - setState(377); - match(LBRACE); - setState(378); - match(RBRACE); - } - } - setState(383); - _errHandler.sync(this); - _la = _input.LA(1); - } - } - break; - default: - throw new NoViableAltException(this); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class ChainContext extends ParserRuleContext { - public ChainContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_chain; } + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class, 0); + } - public ChainContext() { } - public void copyFrom(ChainContext ctx) { - super.copyFrom(ctx); - } - } - public static class DynamicContext extends ChainContext { - public PrimaryContext primary() { - return getRuleContext(PrimaryContext.class,0); - } - public List postfix() { - return getRuleContexts(PostfixContext.class); - } - public PostfixContext postfix(int i) { - return getRuleContext(PostfixContext.class,i); - } - public DynamicContext(ChainContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDynamic(this); - else return visitor.visitChildren(this); - } - } - public static class NewarrayContext extends ChainContext { - public ArrayinitializerContext arrayinitializer() { - return getRuleContext(ArrayinitializerContext.class,0); - } - public NewarrayContext(ChainContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewarray(this); - else return visitor.visitChildren(this); - } - } + public EmptyContext empty() { + return getRuleContext(EmptyContext.class, 0); + } - public final ChainContext chain() throws RecognitionException { - ChainContext _localctx = new ChainContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_chain); - try { - int _alt; - setState(394); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { - case 1: - _localctx = new 
DynamicContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(386); - primary(); - setState(390); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(387); - postfix(); - } - } - } - setState(392); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + public WhileContext(RstatementContext ctx) { + copyFrom(ctx); } - } - break; - case 2: - _localctx = new NewarrayContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(393); - arrayinitializer(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class PrimaryContext extends ParserRuleContext { - public PrimaryContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitWhile(this); + else return visitor.visitChildren(this); + } } - @Override public int getRuleIndex() { return RULE_primary; } - public PrimaryContext() { } - public void copyFrom(PrimaryContext ctx) { - super.copyFrom(ctx); - } - } - public static class ListinitContext extends PrimaryContext { - public ListinitializerContext listinitializer() { - return getRuleContext(ListinitializerContext.class,0); - } - public ListinitContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinit(this); - else return visitor.visitChildren(this); - } - } - public static class RegexContext extends PrimaryContext { - public TerminalNode REGEX() { return getToken(PainlessParser.REGEX, 0); } - public RegexContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRegex(this); - else return visitor.visitChildren(this); - } - } - public static class NullContext extends PrimaryContext { - public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } - public NullContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); - else return visitor.visitChildren(this); - } - } - public static class StringContext extends PrimaryContext { - public TerminalNode STRING() { return getToken(PainlessParser.STRING, 0); } - public StringContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitString(this); - else return visitor.visitChildren(this); - } - } - public static class MapinitContext extends PrimaryContext { - public MapinitializerContext mapinitializer() { - return getRuleContext(MapinitializerContext.class,0); - } - public MapinitContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinit(this); - else return 
visitor.visitChildren(this); - } - } - public static class CalllocalContext extends PrimaryContext { - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public ArgumentsContext arguments() { - return getRuleContext(ArgumentsContext.class,0); - } - public CalllocalContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCalllocal(this); - else return visitor.visitChildren(this); - } - } - public static class TrueContext extends PrimaryContext { - public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } - public TrueContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); - else return visitor.visitChildren(this); - } - } - public static class FalseContext extends PrimaryContext { - public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } - public FalseContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); - else return visitor.visitChildren(this); - } - } - public static class VariableContext extends PrimaryContext { - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public VariableContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitVariable(this); - else return visitor.visitChildren(this); - } - } - public static class NumericContext extends PrimaryContext { - public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } - public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } - public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } - public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } - public NumericContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); - else return visitor.visitChildren(this); - } - } - public static class NewobjectContext extends PrimaryContext { - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public TypeContext type() { - return getRuleContext(TypeContext.class,0); - } - public ArgumentsContext arguments() { - return getRuleContext(ArgumentsContext.class,0); - } - public NewobjectContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewobject(this); - else return visitor.visitChildren(this); - } - } - public static class PrecedenceContext extends PrimaryContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public PrecedenceContext(PrimaryContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return 
((PainlessParserVisitor)visitor).visitPrecedence(this); - else return visitor.visitChildren(this); - } - } + public static class IneachContext extends RstatementContext { + public TerminalNode FOR() { + return getToken(PainlessParser.FOR, 0); + } - public final PrimaryContext primary() throws RecognitionException { - PrimaryContext _localctx = new PrimaryContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_primary); - int _la; - try { - setState(415); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { - case 1: - _localctx = new PrecedenceContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(396); - match(LP); - setState(397); - expression(); - setState(398); - match(RP); + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); } - break; - case 2: - _localctx = new NumericContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(400); - _la = _input.LA(1); - if ( !(((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - break; - case 3: - _localctx = new TrueContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(401); - match(TRUE); - } - break; - case 4: - _localctx = new FalseContext(_localctx); - enterOuterAlt(_localctx, 4); - { - setState(402); - match(FALSE); - } - break; - case 5: - _localctx = new NullContext(_localctx); - enterOuterAlt(_localctx, 5); - { - setState(403); - match(NULL); - } - break; - case 6: - _localctx = new StringContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(404); - match(STRING); - } - break; - case 7: - _localctx = new RegexContext(_localctx); - enterOuterAlt(_localctx, 7); - { - setState(405); - match(REGEX); - } - break; - case 8: - _localctx = new ListinitContext(_localctx); - enterOuterAlt(_localctx, 8); - { - setState(406); - listinitializer(); - } - break; - case 9: - _localctx = new MapinitContext(_localctx); - enterOuterAlt(_localctx, 9); - { - setState(407); - mapinitializer(); - } - break; - case 10: - _localctx = new VariableContext(_localctx); - enterOuterAlt(_localctx, 10); - { - setState(408); - match(ID); - } - break; - case 11: - _localctx = new CalllocalContext(_localctx); - enterOuterAlt(_localctx, 11); - { - setState(409); - match(ID); - setState(410); - arguments(); - } - break; - case 12: - _localctx = new NewobjectContext(_localctx); - enterOuterAlt(_localctx, 12); - { - setState(411); - match(NEW); - setState(412); - type(); - setState(413); - arguments(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class PostfixContext extends ParserRuleContext { - public CallinvokeContext callinvoke() { - return getRuleContext(CallinvokeContext.class,0); - } - public FieldaccessContext fieldaccess() { - return getRuleContext(FieldaccessContext.class,0); - } - public BraceaccessContext braceaccess() { - return getRuleContext(BraceaccessContext.class,0); - } - public PostfixContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_postfix; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostfix(this); - else 
return visitor.visitChildren(this); - } - } - - public final PostfixContext postfix() throws RecognitionException { - PostfixContext _localctx = new PostfixContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_postfix); - try { - setState(420); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(417); - callinvoke(); + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(418); - fieldaccess(); + + public TerminalNode IN() { + return getToken(PainlessParser.IN, 0); } - break; - case 3: - enterOuterAlt(_localctx, 3); - { - setState(419); - braceaccess(); + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class PostdotContext extends ParserRuleContext { - public CallinvokeContext callinvoke() { - return getRuleContext(CallinvokeContext.class,0); - } - public FieldaccessContext fieldaccess() { - return getRuleContext(FieldaccessContext.class,0); - } - public PostdotContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_postdot; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostdot(this); - else return visitor.visitChildren(this); - } - } - - public final PostdotContext postdot() throws RecognitionException { - PostdotContext _localctx = new PostdotContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_postdot); - try { - setState(424); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(422); - callinvoke(); + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(423); - fieldaccess(); + + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class, 0); } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class CallinvokeContext extends ParserRuleContext { - public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } - public ArgumentsContext arguments() { - return getRuleContext(ArgumentsContext.class,0); - } - public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } - public TerminalNode NSDOT() { return getToken(PainlessParser.NSDOT, 0); } - public CallinvokeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_callinvoke; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCallinvoke(this); - else return visitor.visitChildren(this); - } - } + public IneachContext(RstatementContext ctx) { + copyFrom(ctx); + } - public final CallinvokeContext callinvoke() throws RecognitionException { - CallinvokeContext _localctx = new CallinvokeContext(_ctx, getState()); - 
enterRule(_localctx, 54, RULE_callinvoke); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(426); - _la = _input.LA(1); - if ( !(_la==DOT || _la==NSDOT) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(427); - match(DOTID); - setState(428); - arguments(); - } + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitIneach(this); + else return visitor.visitChildren(this); + } } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class FieldaccessContext extends ParserRuleContext { - public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } - public TerminalNode NSDOT() { return getToken(PainlessParser.NSDOT, 0); } - public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } - public TerminalNode DOTINTEGER() { return getToken(PainlessParser.DOTINTEGER, 0); } - public FieldaccessContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_fieldaccess; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFieldaccess(this); - else return visitor.visitChildren(this); - } - } + public static class IfContext extends RstatementContext { + public TerminalNode IF() { + return getToken(PainlessParser.IF, 0); + } - public final FieldaccessContext fieldaccess() throws RecognitionException { - FieldaccessContext _localctx = new FieldaccessContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_fieldaccess); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(430); - _la = _input.LA(1); - if ( !(_la==DOT || _la==NSDOT) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(431); - _la = _input.LA(1); - if ( !(_la==DOTINTEGER || _la==DOTID) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } - public static class BraceaccessContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public BraceaccessContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_braceaccess; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBraceaccess(this); - else return visitor.visitChildren(this); - } - } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } - public final BraceaccessContext braceaccess() throws RecognitionException { - BraceaccessContext _localctx = new BraceaccessContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_braceaccess); - try { - enterOuterAlt(_localctx, 1); - { - setState(433); - match(LBRACE); - setState(434); 
- expression(); - setState(435); - match(RBRACE); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } - public static class ArrayinitializerContext extends ParserRuleContext { - public ArrayinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_arrayinitializer; } + public List trailer() { + return getRuleContexts(TrailerContext.class); + } - public ArrayinitializerContext() { } - public void copyFrom(ArrayinitializerContext ctx) { - super.copyFrom(ctx); - } - } - public static class NewstandardarrayContext extends ArrayinitializerContext { - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public TypeContext type() { - return getRuleContext(TypeContext.class,0); - } - public List LBRACE() { return getTokens(PainlessParser.LBRACE); } - public TerminalNode LBRACE(int i) { - return getToken(PainlessParser.LBRACE, i); - } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public List RBRACE() { return getTokens(PainlessParser.RBRACE); } - public TerminalNode RBRACE(int i) { - return getToken(PainlessParser.RBRACE, i); - } - public PostdotContext postdot() { - return getRuleContext(PostdotContext.class,0); - } - public List postfix() { - return getRuleContexts(PostfixContext.class); - } - public PostfixContext postfix(int i) { - return getRuleContext(PostfixContext.class,i); - } - public NewstandardarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewstandardarray(this); - else return visitor.visitChildren(this); - } - } - public static class NewinitializedarrayContext extends ArrayinitializerContext { - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public TypeContext type() { - return getRuleContext(TypeContext.class,0); - } - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public List postfix() { - return getRuleContexts(PostfixContext.class); - } - public PostfixContext postfix(int i) { - return getRuleContext(PostfixContext.class,i); - } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public NewinitializedarrayContext(ArrayinitializerContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewinitializedarray(this); - else return visitor.visitChildren(this); - } - } + public TrailerContext trailer(int i) { + return getRuleContext(TrailerContext.class, i); + } 
- public final ArrayinitializerContext arrayinitializer() throws RecognitionException { - ArrayinitializerContext _localctx = new ArrayinitializerContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_arrayinitializer); - int _la; - try { - int _alt; - setState(478); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { - case 1: - _localctx = new NewstandardarrayContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(437); - match(NEW); - setState(438); - type(); - setState(443); - _errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: - { - { - setState(439); - match(LBRACE); - setState(440); - expression(); - setState(441); - match(RBRACE); - } - } - break; - default: - throw new NoViableAltException(this); - } - setState(445); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); - } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(454); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { - case 1: - { - setState(447); - postdot(); - setState(451); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(448); - postfix(); - } - } - } - setState(453); + public TerminalNode ELSE() { + return getToken(PainlessParser.ELSE, 0); + } + + public IfContext(RstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitIf(this); + else return visitor.visitChildren(this); + } + } + + public static class EachContext extends RstatementContext { + public TerminalNode FOR() { + return getToken(PainlessParser.FOR, 0); + } + + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public TerminalNode COLON() { + return getToken(PainlessParser.COLON, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class, 0); + } + + public EachContext(RstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitEach(this); + else return visitor.visitChildren(this); + } + } + + public final RstatementContext rstatement() throws RecognitionException { + RstatementContext _localctx = new RstatementContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_rstatement); + int _la; + try { + int _alt; + setState(179); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); - } - } - break; - } - } - break; - case 2: - _localctx = new NewinitializedarrayContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(456); - match(NEW); - setState(457); - type(); - setState(458); - match(LBRACE); - setState(459); - match(RBRACE); - setState(460); - match(LBRACK); - setState(469); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << 
BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(461); - expression(); - setState(466); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(462); - match(COMMA); - setState(463); - expression(); + switch (getInterpreter().adaptivePredict(_input, 12, _ctx)) { + case 1: + _localctx = new IfContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(119); + match(IF); + setState(120); + match(LP); + setState(121); + expression(); + setState(122); + match(RP); + setState(123); + trailer(); + setState(127); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 5, _ctx)) { + case 1: { + setState(124); + match(ELSE); + setState(125); + trailer(); + } + break; + case 2: { + setState(126); + if (!(_input.LA(1) != ELSE)) throw new FailedPredicateException(this, " _input.LA(1) != ELSE "); + } + break; + } + } + break; + case 2: + _localctx = new WhileContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(129); + match(WHILE); + setState(130); + match(LP); + setState(131); + expression(); + setState(132); + match(RP); + setState(135); + switch (_input.LA(1)) { + case LBRACK: + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case PRIMITIVE: + case DEF: + case ID: { + setState(133); + trailer(); + } + break; + case SEMICOLON: { + setState(134); + empty(); + } + break; + default: + throw new NoViableAltException(this); + } + } + break; + case 3: + _localctx = new ForContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(137); + match(FOR); + setState(138); + match(LP); + setState(140); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L + << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(139); + initializer(); + } + } + + setState(142); + match(SEMICOLON); + setState(144); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L + << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(143); + expression(); + } + } + + setState(146); + match(SEMICOLON); + setState(148); + _la = _input.LA(1); + if ((((_la) & 
~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L + << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(147); + afterthought(); + } + } + + setState(150); + match(RP); + setState(153); + switch (_input.LA(1)) { + case LBRACK: + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case PRIMITIVE: + case DEF: + case ID: { + setState(151); + trailer(); + } + break; + case SEMICOLON: { + setState(152); + empty(); + } + break; + default: + throw new NoViableAltException(this); + } + } + break; + case 4: + _localctx = new EachContext(_localctx); + enterOuterAlt(_localctx, 4); { + setState(155); + match(FOR); + setState(156); + match(LP); + setState(157); + decltype(); + setState(158); + match(ID); + setState(159); + match(COLON); + setState(160); + expression(); + setState(161); + match(RP); + setState(162); + trailer(); + } + break; + case 5: + _localctx = new IneachContext(_localctx); + enterOuterAlt(_localctx, 5); { + setState(164); + match(FOR); + setState(165); + match(LP); + setState(166); + match(ID); + setState(167); + match(IN); + setState(168); + expression(); + setState(169); + match(RP); + setState(170); + trailer(); + } + break; + case 6: + _localctx = new TryContext(_localctx); + enterOuterAlt(_localctx, 6); { + setState(172); + match(TRY); + setState(173); + block(); + setState(175); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: { + { + setState(174); + trap(); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(177); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 11, _ctx); + } while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER); + } + break; } - } - setState(468); + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class DstatementContext extends ParserRuleContext { + public DstatementContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_dstatement; + } + + public DstatementContext() {} + + public void copyFrom(DstatementContext ctx) { + super.copyFrom(ctx); + } + } + + public static class DeclContext extends DstatementContext { + public DeclarationContext declaration() { + return getRuleContext(DeclarationContext.class, 0); + } + + public DeclContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDecl(this); + else return visitor.visitChildren(this); + } + } + + public static class BreakContext extends DstatementContext { + public TerminalNode BREAK() { + return 
getToken(PainlessParser.BREAK, 0); + } + + public BreakContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitBreak(this); + else return visitor.visitChildren(this); + } + } + + public static class ThrowContext extends DstatementContext { + public TerminalNode THROW() { + return getToken(PainlessParser.THROW, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public ThrowContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitThrow(this); + else return visitor.visitChildren(this); + } + } + + public static class ContinueContext extends DstatementContext { + public TerminalNode CONTINUE() { + return getToken(PainlessParser.CONTINUE, 0); + } + + public ContinueContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitContinue(this); + else return visitor.visitChildren(this); + } + } + + public static class ExprContext extends DstatementContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public ExprContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitExpr(this); + else return visitor.visitChildren(this); + } + } + + public static class DoContext extends DstatementContext { + public TerminalNode DO() { + return getToken(PainlessParser.DO, 0); + } + + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } + + public TerminalNode WHILE() { + return getToken(PainlessParser.WHILE, 0); + } + + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public DoContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDo(this); + else return visitor.visitChildren(this); + } + } + + public static class ReturnContext extends DstatementContext { + public TerminalNode RETURN() { + return getToken(PainlessParser.RETURN, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public ReturnContext(DstatementContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitReturn(this); + else return visitor.visitChildren(this); + } + } + + public final DstatementContext dstatement() throws RecognitionException { + DstatementContext _localctx = new DstatementContext(_ctx, getState()); + enterRule(_localctx, 10, RULE_dstatement); + int _la; + try { + setState(198); _errHandler.sync(this); - _la = _input.LA(1); - } - } - } + switch (getInterpreter().adaptivePredict(_input, 14, _ctx)) { + case 1: + _localctx = new DoContext(_localctx); + 
enterOuterAlt(_localctx, 1); { + setState(181); + match(DO); + setState(182); + block(); + setState(183); + match(WHILE); + setState(184); + match(LP); + setState(185); + expression(); + setState(186); + match(RP); + } + break; + case 2: + _localctx = new DeclContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(188); + declaration(); + } + break; + case 3: + _localctx = new ContinueContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(189); + match(CONTINUE); + } + break; + case 4: + _localctx = new BreakContext(_localctx); + enterOuterAlt(_localctx, 4); { + setState(190); + match(BREAK); + } + break; + case 5: + _localctx = new ReturnContext(_localctx); + enterOuterAlt(_localctx, 5); { + setState(191); + match(RETURN); + setState(193); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L + << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(192); + expression(); + } + } - setState(471); - match(RBRACK); - setState(475); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(472); - postfix(); + } + break; + case 6: + _localctx = new ThrowContext(_localctx); + enterOuterAlt(_localctx, 6); { + setState(195); + match(THROW); + setState(196); + expression(); + } + break; + case 7: + _localctx = new ExprContext(_localctx); + enterOuterAlt(_localctx, 7); { + setState(197); + expression(); + } + break; } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class TrailerContext extends ParserRuleContext { + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } + + public StatementContext statement() { + return getRuleContext(StatementContext.class, 0); + } + + public TrailerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_trailer; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitTrailer(this); + else return visitor.visitChildren(this); + } + } + + public final TrailerContext trailer() throws RecognitionException { + TrailerContext _localctx = new TrailerContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_trailer); + try { + setState(202); + switch (_input.LA(1)) { + case LBRACK: + enterOuterAlt(_localctx, 1); { + setState(200); + block(); + } + break; + case LBRACE: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case PRIMITIVE: + case DEF: + case ID: + enterOuterAlt(_localctx, 2); { + setState(201); + statement(); + } + 
break; + default: + throw new NoViableAltException(this); } - } - setState(477); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); } - } - break; - } + return _localctx; } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class ListinitializerContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public ListinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_listinitializer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitListinitializer(this); - else return visitor.visitChildren(this); - } - } - - public final ListinitializerContext listinitializer() throws RecognitionException { - ListinitializerContext _localctx = new ListinitializerContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_listinitializer); - int _la; - try { - setState(493); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(480); - match(LBRACE); - setState(481); - expression(); - setState(486); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(482); - match(COMMA); - setState(483); - expression(); - } - } - setState(488); - _errHandler.sync(this); - _la = _input.LA(1); + public static class BlockContext extends ParserRuleContext { + public TerminalNode LBRACK() { + return getToken(PainlessParser.LBRACK, 0); } - setState(489); - match(RBRACE); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(491); - match(LBRACE); - setState(492); - match(RBRACE); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class MapinitializerContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public List maptoken() { - return getRuleContexts(MaptokenContext.class); - } - public MaptokenContext maptoken(int i) { - return getRuleContext(MaptokenContext.class,i); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public MapinitializerContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return 
RULE_mapinitializer; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMapinitializer(this); - else return visitor.visitChildren(this); - } - } - - public final MapinitializerContext mapinitializer() throws RecognitionException { - MapinitializerContext _localctx = new MapinitializerContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_mapinitializer); - int _la; - try { - setState(509); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(495); - match(LBRACE); - setState(496); - maptoken(); - setState(501); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(497); - match(COMMA); - setState(498); - maptoken(); - } - } - setState(503); - _errHandler.sync(this); - _la = _input.LA(1); + public TerminalNode RBRACK() { + return getToken(PainlessParser.RBRACK, 0); } - setState(504); - match(RBRACE); + + public List statement() { + return getRuleContexts(StatementContext.class); } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(506); - match(LBRACE); - setState(507); - match(COLON); - setState(508); - match(RBRACE); + + public StatementContext statement(int i) { + return getRuleContext(StatementContext.class, i); } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class MaptokenContext extends ParserRuleContext { - public List expression() { - return getRuleContexts(ExpressionContext.class); - } - public ExpressionContext expression(int i) { - return getRuleContext(ExpressionContext.class,i); - } - public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } - public MaptokenContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_maptoken; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMaptoken(this); - else return visitor.visitChildren(this); - } - } - - public final MaptokenContext maptoken() throws RecognitionException { - MaptokenContext _localctx = new MaptokenContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_maptoken); - try { - enterOuterAlt(_localctx, 1); - { - setState(511); - expression(); - setState(512); - match(COLON); - setState(513); - expression(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class ArgumentsContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public List argument() { - return getRuleContexts(ArgumentContext.class); - } - public ArgumentContext argument(int i) { - return getRuleContext(ArgumentContext.class,i); - } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public ArgumentsContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_arguments; } 
- @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitArguments(this); - else return visitor.visitChildren(this); - } - } - - public final ArgumentsContext arguments() throws RecognitionException { - ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_arguments); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - { - setState(515); - match(LP); - setState(524); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { - { - setState(516); - argument(); - setState(521); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(517); - match(COMMA); - setState(518); - argument(); - } - } - setState(523); - _errHandler.sync(this); - _la = _input.LA(1); + public DstatementContext dstatement() { + return getRuleContext(DstatementContext.class, 0); } - } - } - setState(526); - match(RP); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class ArgumentContext extends ParserRuleContext { - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public LambdaContext lambda() { - return getRuleContext(LambdaContext.class,0); - } - public FuncrefContext funcref() { - return getRuleContext(FuncrefContext.class,0); - } - public ArgumentContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_argument; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitArgument(this); - else return visitor.visitChildren(this); - } - } - - public final ArgumentContext argument() throws RecognitionException { - ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_argument); - try { - setState(531); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(528); - expression(); + public BlockContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(529); - lambda(); - } - break; - case 3: - enterOuterAlt(_localctx, 3); - { - setState(530); - funcref(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class LambdaContext extends ParserRuleContext { - public TerminalNode ARROW() { return getToken(PainlessParser.ARROW, 0); } - public List lamtype() { - return getRuleContexts(LamtypeContext.class); - } - public LamtypeContext lamtype(int i) { - 
return getRuleContext(LamtypeContext.class,i); - } - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); - } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public LambdaContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_lambda; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLambda(this); - else return visitor.visitChildren(this); - } - } - - public final LambdaContext lambda() throws RecognitionException { - LambdaContext _localctx = new LambdaContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_lambda); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(546); - switch (_input.LA(1)) { - case PRIMITIVE: - case DEF: - case ID: - { - setState(533); - lamtype(); + @Override + public int getRuleIndex() { + return RULE_block; } - break; - case LP: - { - setState(534); - match(LP); - setState(543); - _la = _input.LA(1); - if (((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (PRIMITIVE - 81)) | (1L << (DEF - 81)) | (1L << (ID - 81)))) != 0)) { - { - setState(535); - lamtype(); - setState(540); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==COMMA) { + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitBlock(this); + else return visitor.visitChildren(this); + } + } + + public final BlockContext block() throws RecognitionException { + BlockContext _localctx = new BlockContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_block); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); { + setState(204); + match(LBRACK); + setState(208); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 16, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(205); + statement(); + } + } + } + setState(210); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 16, _ctx); + } + setState(212); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << DO) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L + << NEW) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L + << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(211); + dstatement(); + } + } + + setState(214); + match(RBRACK); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class EmptyContext extends ParserRuleContext { + public 
TerminalNode SEMICOLON() { + return getToken(PainlessParser.SEMICOLON, 0); + } + + public EmptyContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_empty; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitEmpty(this); + else return visitor.visitChildren(this); + } + } + + public final EmptyContext empty() throws RecognitionException { + EmptyContext _localctx = new EmptyContext(_ctx, getState()); + enterRule(_localctx, 16, RULE_empty); + try { + enterOuterAlt(_localctx, 1); { - setState(536); - match(COMMA); - setState(537); - lamtype(); + setState(216); + match(SEMICOLON); } - } - setState(542); + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class InitializerContext extends ParserRuleContext { + public DeclarationContext declaration() { + return getRuleContext(DeclarationContext.class, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public InitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_initializer; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitInitializer(this); + else return visitor.visitChildren(this); + } + } + + public final InitializerContext initializer() throws RecognitionException { + InitializerContext _localctx = new InitializerContext(_ctx, getState()); + enterRule(_localctx, 18, RULE_initializer); + try { + setState(220); _errHandler.sync(this); - _la = _input.LA(1); - } - } + switch (getInterpreter().adaptivePredict(_input, 18, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(218); + declaration(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); { + setState(219); + expression(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class AfterthoughtContext extends ParserRuleContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); } - setState(545); - match(RP); + public AfterthoughtContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); } - break; - default: - throw new NoViableAltException(this); - } - setState(548); - match(ARROW); - setState(551); - switch (_input.LA(1)) { - case LBRACK: - { - setState(549); - block(); + + @Override + public int getRuleIndex() { + return RULE_afterthought; } - break; - case LBRACE: - case LP: - case NEW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case REGEX: - case TRUE: - case FALSE: - case NULL: - case ID: - { - setState(550); - expression(); + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitAfterthought(this); + else return visitor.visitChildren(this); } - break; - default: - throw new 
NoViableAltException(this); - } - } } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class LamtypeContext extends ParserRuleContext { - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public LamtypeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_lamtype; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLamtype(this); - else return visitor.visitChildren(this); - } - } - - public final LamtypeContext lamtype() throws RecognitionException { - LamtypeContext _localctx = new LamtypeContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_lamtype); - try { - enterOuterAlt(_localctx, 1); - { - setState(554); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { - case 1: - { - setState(553); - decltype(); + public final AfterthoughtContext afterthought() throws RecognitionException { + AfterthoughtContext _localctx = new AfterthoughtContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_afterthought); + try { + enterOuterAlt(_localctx, 1); + { + setState(222); + expression(); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); } - break; - } - setState(556); - match(ID); - } + return _localctx; } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class FuncrefContext extends ParserRuleContext { - public FuncrefContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_funcref; } - - public FuncrefContext() { } - public void copyFrom(FuncrefContext ctx) { - super.copyFrom(ctx); - } - } - public static class ClassfuncrefContext extends FuncrefContext { - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public ClassfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitClassfuncref(this); - else return visitor.visitChildren(this); - } - } - public static class ConstructorfuncrefContext extends FuncrefContext { - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public ConstructorfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitConstructorfuncref(this); - else return visitor.visitChildren(this); - } - } - public static class LocalfuncrefContext extends FuncrefContext { - 
public TerminalNode THIS() { return getToken(PainlessParser.THIS, 0); } - public TerminalNode REF() { return getToken(PainlessParser.REF, 0); } - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public LocalfuncrefContext(FuncrefContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLocalfuncref(this); - else return visitor.visitChildren(this); - } - } - - public final FuncrefContext funcref() throws RecognitionException { - FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_funcref); - try { - setState(569); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { - case 1: - _localctx = new ClassfuncrefContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(558); - decltype(); - setState(559); - match(REF); - setState(560); - match(ID); + public static class DeclarationContext extends ParserRuleContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); } - break; - case 2: - _localctx = new ConstructorfuncrefContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(562); - decltype(); - setState(563); - match(REF); - setState(564); - match(NEW); - } - break; - case 3: - _localctx = new LocalfuncrefContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(566); - match(THIS); - setState(567); - match(REF); - setState(568); - match(ID); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { - switch (ruleIndex) { - case 4: - return rstatement_sempred((RstatementContext)_localctx, predIndex); - case 16: - return noncondexpression_sempred((NoncondexpressionContext)_localctx, predIndex); - } - return true; - } - private boolean rstatement_sempred(RstatementContext _localctx, int predIndex) { - switch (predIndex) { - case 0: - return _input.LA(1) != ELSE ; - } - return true; - } - private boolean noncondexpression_sempred(NoncondexpressionContext _localctx, int predIndex) { - switch (predIndex) { - case 1: - return precpred(_ctx, 13); - case 2: - return precpred(_ctx, 12); - case 3: - return precpred(_ctx, 11); - case 4: - return precpred(_ctx, 10); - case 5: - return precpred(_ctx, 9); - case 6: - return precpred(_ctx, 7); - case 7: - return precpred(_ctx, 6); - case 8: - return precpred(_ctx, 5); - case 9: - return precpred(_ctx, 4); - case 10: - return precpred(_ctx, 3); - case 11: - return precpred(_ctx, 2); - case 12: - return precpred(_ctx, 1); - case 13: - return precpred(_ctx, 8); - } - return true; - } + public List declvar() { + return getRuleContexts(DeclvarContext.class); + } - public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3W\u023e\4\2\t\2\4"+ - "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ - "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ - "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ - "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ - "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\3\2\7\2R\n\2\f\2\16"+ - "\2U\13\2\3\2\7\2X\n\2\f\2\16\2[\13\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\4\3"+ - 
"\4\3\4\3\4\3\4\3\4\3\4\7\4k\n\4\f\4\16\4n\13\4\5\4p\n\4\3\4\3\4\3\5\3"+ - "\5\3\5\3\5\5\5x\n\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u0082\n\6\3\6"+ - "\3\6\3\6\3\6\3\6\3\6\5\6\u008a\n\6\3\6\3\6\3\6\5\6\u008f\n\6\3\6\3\6\5"+ - "\6\u0093\n\6\3\6\3\6\5\6\u0097\n\6\3\6\3\6\3\6\5\6\u009c\n\6\3\6\3\6\3"+ - "\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6"+ - "\6\6\u00b2\n\6\r\6\16\6\u00b3\5\6\u00b6\n\6\3\7\3\7\3\7\3\7\3\7\3\7\3"+ - "\7\3\7\3\7\3\7\3\7\3\7\5\7\u00c4\n\7\3\7\3\7\3\7\5\7\u00c9\n\7\3\b\3\b"+ - "\5\b\u00cd\n\b\3\t\3\t\7\t\u00d1\n\t\f\t\16\t\u00d4\13\t\3\t\5\t\u00d7"+ - "\n\t\3\t\3\t\3\n\3\n\3\13\3\13\5\13\u00df\n\13\3\f\3\f\3\r\3\r\3\r\3\r"+ - "\7\r\u00e7\n\r\f\r\16\r\u00ea\13\r\3\16\3\16\3\16\7\16\u00ef\n\16\f\16"+ - "\16\16\u00f2\13\16\3\17\3\17\3\17\3\17\3\17\7\17\u00f9\n\17\f\17\16\17"+ - "\u00fc\13\17\5\17\u00fe\n\17\3\20\3\20\3\20\5\20\u0103\n\20\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\3\22\3\22\3\22\3\22\3\22\7\22\u0136\n\22\f\22\16\22\u0139\13\22\3\23"+ - "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u0146\n\23\3\24"+ - "\3\24\3\24\3\24\3\24\5\24\u014d\n\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+ - "\5\25\u0156\n\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\5\26"+ - "\u0162\n\26\3\27\3\27\3\30\3\30\3\30\6\30\u0169\n\30\r\30\16\30\u016a"+ - "\3\30\3\30\3\30\6\30\u0170\n\30\r\30\16\30\u0171\3\30\3\30\3\30\7\30\u0177"+ - "\n\30\f\30\16\30\u017a\13\30\3\30\3\30\7\30\u017e\n\30\f\30\16\30\u0181"+ - "\13\30\5\30\u0183\n\30\3\31\3\31\7\31\u0187\n\31\f\31\16\31\u018a\13\31"+ - "\3\31\5\31\u018d\n\31\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32"+ - "\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u01a2\n\32\3\33\3\33"+ - "\3\33\5\33\u01a7\n\33\3\34\3\34\5\34\u01ab\n\34\3\35\3\35\3\35\3\35\3"+ - "\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \6 \u01be\n \r \16"+ - " \u01bf\3 \3 \7 \u01c4\n \f \16 \u01c7\13 \5 \u01c9\n \3 \3 \3 \3 \3 "+ - "\3 \3 \3 \7 \u01d3\n \f \16 \u01d6\13 \5 \u01d8\n \3 \3 \7 \u01dc\n \f"+ - " \16 \u01df\13 \5 \u01e1\n \3!\3!\3!\3!\7!\u01e7\n!\f!\16!\u01ea\13!\3"+ - "!\3!\3!\3!\5!\u01f0\n!\3\"\3\"\3\"\3\"\7\"\u01f6\n\"\f\"\16\"\u01f9\13"+ - "\"\3\"\3\"\3\"\3\"\3\"\5\"\u0200\n\"\3#\3#\3#\3#\3$\3$\3$\3$\7$\u020a"+ - "\n$\f$\16$\u020d\13$\5$\u020f\n$\3$\3$\3%\3%\3%\5%\u0216\n%\3&\3&\3&\3"+ - "&\3&\7&\u021d\n&\f&\16&\u0220\13&\5&\u0222\n&\3&\5&\u0225\n&\3&\3&\3&"+ - "\5&\u022a\n&\3\'\5\'\u022d\n\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3"+ - "(\5(\u023c\n(\3(\2\3\")\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*"+ - ",.\60\62\64\668:<>@BDFHJLN\2\20\3\3\16\16\3\2 \"\3\2#$\3\2:;\3\2%\'\3"+ - "\2(+\3\2,/\3\2>I\3\2<=\3\2\36\37\3\2ST\3\2JM\3\2\13\f\3\2VW\u0279\2S\3"+ - "\2\2\2\4^\3\2\2\2\6c\3\2\2\2\bw\3\2\2\2\n\u00b5\3\2\2\2\f\u00c8\3\2\2"+ - "\2\16\u00cc\3\2\2\2\20\u00ce\3\2\2\2\22\u00da\3\2\2\2\24\u00de\3\2\2\2"+ - "\26\u00e0\3\2\2\2\30\u00e2\3\2\2\2\32\u00eb\3\2\2\2\34\u00fd\3\2\2\2\36"+ - "\u00ff\3\2\2\2 \u0104\3\2\2\2\"\u010b\3\2\2\2$\u0145\3\2\2\2&\u014c\3"+ - "\2\2\2(\u0155\3\2\2\2*\u0161\3\2\2\2,\u0163\3\2\2\2.\u0182\3\2\2\2\60"+ - "\u018c\3\2\2\2\62\u01a1\3\2\2\2\64\u01a6\3\2\2\2\66\u01aa\3\2\2\28\u01ac"+ - "\3\2\2\2:\u01b0\3\2\2\2<\u01b3\3\2\2\2>\u01e0\3\2\2\2@\u01ef\3\2\2\2B"+ - "\u01ff\3\2\2\2D\u0201\3\2\2\2F\u0205\3\2\2\2H\u0215\3\2\2\2J\u0224\3\2"+ - 
"\2\2L\u022c\3\2\2\2N\u023b\3\2\2\2PR\5\4\3\2QP\3\2\2\2RU\3\2\2\2SQ\3\2"+ - "\2\2ST\3\2\2\2TY\3\2\2\2US\3\2\2\2VX\5\b\5\2WV\3\2\2\2X[\3\2\2\2YW\3\2"+ - "\2\2YZ\3\2\2\2Z\\\3\2\2\2[Y\3\2\2\2\\]\7\2\2\3]\3\3\2\2\2^_\5\32\16\2"+ - "_`\7U\2\2`a\5\6\4\2ab\5\20\t\2b\5\3\2\2\2co\7\t\2\2de\5\32\16\2el\7U\2"+ - "\2fg\7\r\2\2gh\5\32\16\2hi\7U\2\2ik\3\2\2\2jf\3\2\2\2kn\3\2\2\2lj\3\2"+ - "\2\2lm\3\2\2\2mp\3\2\2\2nl\3\2\2\2od\3\2\2\2op\3\2\2\2pq\3\2\2\2qr\7\n"+ - "\2\2r\7\3\2\2\2sx\5\n\6\2tu\5\f\7\2uv\t\2\2\2vx\3\2\2\2ws\3\2\2\2wt\3"+ - "\2\2\2x\t\3\2\2\2yz\7\17\2\2z{\7\t\2\2{|\5$\23\2|}\7\n\2\2}\u0081\5\16"+ - "\b\2~\177\7\21\2\2\177\u0082\5\16\b\2\u0080\u0082\6\6\2\2\u0081~\3\2\2"+ - "\2\u0081\u0080\3\2\2\2\u0082\u00b6\3\2\2\2\u0083\u0084\7\22\2\2\u0084"+ - "\u0085\7\t\2\2\u0085\u0086\5$\23\2\u0086\u0089\7\n\2\2\u0087\u008a\5\16"+ - "\b\2\u0088\u008a\5\22\n\2\u0089\u0087\3\2\2\2\u0089\u0088\3\2\2\2\u008a"+ - "\u00b6\3\2\2\2\u008b\u008c\7\24\2\2\u008c\u008e\7\t\2\2\u008d\u008f\5"+ - "\24\13\2\u008e\u008d\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090"+ - "\u0092\7\16\2\2\u0091\u0093\5$\23\2\u0092\u0091\3\2\2\2\u0092\u0093\3"+ - "\2\2\2\u0093\u0094\3\2\2\2\u0094\u0096\7\16\2\2\u0095\u0097\5\26\f\2\u0096"+ - "\u0095\3\2\2\2\u0096\u0097\3\2\2\2\u0097\u0098\3\2\2\2\u0098\u009b\7\n"+ - "\2\2\u0099\u009c\5\16\b\2\u009a\u009c\5\22\n\2\u009b\u0099\3\2\2\2\u009b"+ - "\u009a\3\2\2\2\u009c\u00b6\3\2\2\2\u009d\u009e\7\24\2\2\u009e\u009f\7"+ - "\t\2\2\u009f\u00a0\5\32\16\2\u00a0\u00a1\7U\2\2\u00a1\u00a2\7\66\2\2\u00a2"+ - "\u00a3\5$\23\2\u00a3\u00a4\7\n\2\2\u00a4\u00a5\5\16\b\2\u00a5\u00b6\3"+ - "\2\2\2\u00a6\u00a7\7\24\2\2\u00a7\u00a8\7\t\2\2\u00a8\u00a9\7U\2\2\u00a9"+ - "\u00aa\7\20\2\2\u00aa\u00ab\5$\23\2\u00ab\u00ac\7\n\2\2\u00ac\u00ad\5"+ - "\16\b\2\u00ad\u00b6\3\2\2\2\u00ae\u00af\7\31\2\2\u00af\u00b1\5\20\t\2"+ - "\u00b0\u00b2\5 \21\2\u00b1\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00b1"+ - "\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b6\3\2\2\2\u00b5y\3\2\2\2\u00b5"+ - "\u0083\3\2\2\2\u00b5\u008b\3\2\2\2\u00b5\u009d\3\2\2\2\u00b5\u00a6\3\2"+ - "\2\2\u00b5\u00ae\3\2\2\2\u00b6\13\3\2\2\2\u00b7\u00b8\7\23\2\2\u00b8\u00b9"+ - "\5\20\t\2\u00b9\u00ba\7\22\2\2\u00ba\u00bb\7\t\2\2\u00bb\u00bc\5$\23\2"+ - "\u00bc\u00bd\7\n\2\2\u00bd\u00c9\3\2\2\2\u00be\u00c9\5\30\r\2\u00bf\u00c9"+ - "\7\25\2\2\u00c0\u00c9\7\26\2\2\u00c1\u00c3\7\27\2\2\u00c2\u00c4\5$\23"+ - "\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c9\3\2\2\2\u00c5\u00c6"+ - "\7\33\2\2\u00c6\u00c9\5$\23\2\u00c7\u00c9\5$\23\2\u00c8\u00b7\3\2\2\2"+ - "\u00c8\u00be\3\2\2\2\u00c8\u00bf\3\2\2\2\u00c8\u00c0\3\2\2\2\u00c8\u00c1"+ - "\3\2\2\2\u00c8\u00c5\3\2\2\2\u00c8\u00c7\3\2\2\2\u00c9\r\3\2\2\2\u00ca"+ - "\u00cd\5\20\t\2\u00cb\u00cd\5\b\5\2\u00cc\u00ca\3\2\2\2\u00cc\u00cb\3"+ - "\2\2\2\u00cd\17\3\2\2\2\u00ce\u00d2\7\5\2\2\u00cf\u00d1\5\b\5\2\u00d0"+ - "\u00cf\3\2\2\2\u00d1\u00d4\3\2\2\2\u00d2\u00d0\3\2\2\2\u00d2\u00d3\3\2"+ - "\2\2\u00d3\u00d6\3\2\2\2\u00d4\u00d2\3\2\2\2\u00d5\u00d7\5\f\7\2\u00d6"+ - "\u00d5\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00d9\7\6"+ - "\2\2\u00d9\21\3\2\2\2\u00da\u00db\7\16\2\2\u00db\23\3\2\2\2\u00dc\u00df"+ - "\5\30\r\2\u00dd\u00df\5$\23\2\u00de\u00dc\3\2\2\2\u00de\u00dd\3\2\2\2"+ - "\u00df\25\3\2\2\2\u00e0\u00e1\5$\23\2\u00e1\27\3\2\2\2\u00e2\u00e3\5\32"+ - "\16\2\u00e3\u00e8\5\36\20\2\u00e4\u00e5\7\r\2\2\u00e5\u00e7\5\36\20\2"+ - "\u00e6\u00e4\3\2\2\2\u00e7\u00ea\3\2\2\2\u00e8\u00e6\3\2\2\2\u00e8\u00e9"+ - "\3\2\2\2\u00e9\31\3\2\2\2\u00ea\u00e8\3\2\2\2\u00eb\u00f0\5\34\17\2\u00ec"+ - 
"\u00ed\7\7\2\2\u00ed\u00ef\7\b\2\2\u00ee\u00ec\3\2\2\2\u00ef\u00f2\3\2"+ - "\2\2\u00f0\u00ee\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\33\3\2\2\2\u00f2\u00f0"+ - "\3\2\2\2\u00f3\u00fe\7T\2\2\u00f4\u00fe\7S\2\2\u00f5\u00fa\7U\2\2\u00f6"+ - "\u00f7\7\13\2\2\u00f7\u00f9\7W\2\2\u00f8\u00f6\3\2\2\2\u00f9\u00fc\3\2"+ - "\2\2\u00fa\u00f8\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb\u00fe\3\2\2\2\u00fc"+ - "\u00fa\3\2\2\2\u00fd\u00f3\3\2\2\2\u00fd\u00f4\3\2\2\2\u00fd\u00f5\3\2"+ - "\2\2\u00fe\35\3\2\2\2\u00ff\u0102\7U\2\2\u0100\u0101\7>\2\2\u0101\u0103"+ - "\5$\23\2\u0102\u0100\3\2\2\2\u0102\u0103\3\2\2\2\u0103\37\3\2\2\2\u0104"+ - "\u0105\7\32\2\2\u0105\u0106\7\t\2\2\u0106\u0107\5\34\17\2\u0107\u0108"+ - "\7U\2\2\u0108\u0109\7\n\2\2\u0109\u010a\5\20\t\2\u010a!\3\2\2\2\u010b"+ - "\u010c\b\22\1\2\u010c\u010d\5&\24\2\u010d\u0137\3\2\2\2\u010e\u010f\f"+ - "\17\2\2\u010f\u0110\t\3\2\2\u0110\u0136\5\"\22\20\u0111\u0112\f\16\2\2"+ - "\u0112\u0113\t\4\2\2\u0113\u0136\5\"\22\17\u0114\u0115\f\r\2\2\u0115\u0116"+ - "\t\5\2\2\u0116\u0136\5\"\22\16\u0117\u0118\f\f\2\2\u0118\u0119\t\6\2\2"+ - "\u0119\u0136\5\"\22\r\u011a\u011b\f\13\2\2\u011b\u011c\t\7\2\2\u011c\u0136"+ - "\5\"\22\f\u011d\u011e\f\t\2\2\u011e\u011f\t\b\2\2\u011f\u0136\5\"\22\n"+ - "\u0120\u0121\f\b\2\2\u0121\u0122\7\60\2\2\u0122\u0136\5\"\22\t\u0123\u0124"+ - "\f\7\2\2\u0124\u0125\7\61\2\2\u0125\u0136\5\"\22\b\u0126\u0127\f\6\2\2"+ - "\u0127\u0128\7\62\2\2\u0128\u0136\5\"\22\7\u0129\u012a\f\5\2\2\u012a\u012b"+ - "\7\63\2\2\u012b\u0136\5\"\22\6\u012c\u012d\f\4\2\2\u012d\u012e\7\64\2"+ - "\2\u012e\u0136\5\"\22\5\u012f\u0130\f\3\2\2\u0130\u0131\7\67\2\2\u0131"+ - "\u0136\5\"\22\3\u0132\u0133\f\n\2\2\u0133\u0134\7\35\2\2\u0134\u0136\5"+ - "\32\16\2\u0135\u010e\3\2\2\2\u0135\u0111\3\2\2\2\u0135\u0114\3\2\2\2\u0135"+ - "\u0117\3\2\2\2\u0135\u011a\3\2\2\2\u0135\u011d\3\2\2\2\u0135\u0120\3\2"+ - "\2\2\u0135\u0123\3\2\2\2\u0135\u0126\3\2\2\2\u0135\u0129\3\2\2\2\u0135"+ - "\u012c\3\2\2\2\u0135\u012f\3\2\2\2\u0135\u0132\3\2\2\2\u0136\u0139\3\2"+ - "\2\2\u0137\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138#\3\2\2\2\u0139\u0137"+ - "\3\2\2\2\u013a\u0146\5\"\22\2\u013b\u013c\5\"\22\2\u013c\u013d\7\65\2"+ - "\2\u013d\u013e\5$\23\2\u013e\u013f\7\66\2\2\u013f\u0140\5$\23\2\u0140"+ - "\u0146\3\2\2\2\u0141\u0142\5\"\22\2\u0142\u0143\t\t\2\2\u0143\u0144\5"+ - "$\23\2\u0144\u0146\3\2\2\2\u0145\u013a\3\2\2\2\u0145\u013b\3\2\2\2\u0145"+ - "\u0141\3\2\2\2\u0146%\3\2\2\2\u0147\u0148\t\n\2\2\u0148\u014d\5\60\31"+ - "\2\u0149\u014a\t\4\2\2\u014a\u014d\5&\24\2\u014b\u014d\5(\25\2\u014c\u0147"+ - "\3\2\2\2\u014c\u0149\3\2\2\2\u014c\u014b\3\2\2\2\u014d\'\3\2\2\2\u014e"+ - "\u0156\5\60\31\2\u014f\u0150\5\60\31\2\u0150\u0151\t\n\2\2\u0151\u0156"+ - "\3\2\2\2\u0152\u0153\t\13\2\2\u0153\u0156\5&\24\2\u0154\u0156\5*\26\2"+ - "\u0155\u014e\3\2\2\2\u0155\u014f\3\2\2\2\u0155\u0152\3\2\2\2\u0155\u0154"+ - "\3\2\2\2\u0156)\3\2\2\2\u0157\u0158\7\t\2\2\u0158\u0159\5,\27\2\u0159"+ - "\u015a\7\n\2\2\u015a\u015b\5&\24\2\u015b\u0162\3\2\2\2\u015c\u015d\7\t"+ - "\2\2\u015d\u015e\5.\30\2\u015e\u015f\7\n\2\2\u015f\u0160\5(\25\2\u0160"+ - "\u0162\3\2\2\2\u0161\u0157\3\2\2\2\u0161\u015c\3\2\2\2\u0162+\3\2\2\2"+ - "\u0163\u0164\t\f\2\2\u0164-\3\2\2\2\u0165\u0168\7T\2\2\u0166\u0167\7\7"+ - "\2\2\u0167\u0169\7\b\2\2\u0168\u0166\3\2\2\2\u0169\u016a\3\2\2\2\u016a"+ - "\u0168\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u0183\3\2\2\2\u016c\u016f\7S"+ - "\2\2\u016d\u016e\7\7\2\2\u016e\u0170\7\b\2\2\u016f\u016d\3\2\2\2\u0170"+ - "\u0171\3\2\2\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0183\3\2"+ - 
"\2\2\u0173\u0178\7U\2\2\u0174\u0175\7\13\2\2\u0175\u0177\7W\2\2\u0176"+ - "\u0174\3\2\2\2\u0177\u017a\3\2\2\2\u0178\u0176\3\2\2\2\u0178\u0179\3\2"+ - "\2\2\u0179\u017f\3\2\2\2\u017a\u0178\3\2\2\2\u017b\u017c\7\7\2\2\u017c"+ - "\u017e\7\b\2\2\u017d\u017b\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2"+ - "\2\2\u017f\u0180\3\2\2\2\u0180\u0183\3\2\2\2\u0181\u017f\3\2\2\2\u0182"+ - "\u0165\3\2\2\2\u0182\u016c\3\2\2\2\u0182\u0173\3\2\2\2\u0183/\3\2\2\2"+ - "\u0184\u0188\5\62\32\2\u0185\u0187\5\64\33\2\u0186\u0185\3\2\2\2\u0187"+ - "\u018a\3\2\2\2\u0188\u0186\3\2\2\2\u0188\u0189\3\2\2\2\u0189\u018d\3\2"+ - "\2\2\u018a\u0188\3\2\2\2\u018b\u018d\5> \2\u018c\u0184\3\2\2\2\u018c\u018b"+ - "\3\2\2\2\u018d\61\3\2\2\2\u018e\u018f\7\t\2\2\u018f\u0190\5$\23\2\u0190"+ - "\u0191\7\n\2\2\u0191\u01a2\3\2\2\2\u0192\u01a2\t\r\2\2\u0193\u01a2\7P"+ - "\2\2\u0194\u01a2\7Q\2\2\u0195\u01a2\7R\2\2\u0196\u01a2\7N\2\2\u0197\u01a2"+ - "\7O\2\2\u0198\u01a2\5@!\2\u0199\u01a2\5B\"\2\u019a\u01a2\7U\2\2\u019b"+ - "\u019c\7U\2\2\u019c\u01a2\5F$\2\u019d\u019e\7\30\2\2\u019e\u019f\5\34"+ - "\17\2\u019f\u01a0\5F$\2\u01a0\u01a2\3\2\2\2\u01a1\u018e\3\2\2\2\u01a1"+ - "\u0192\3\2\2\2\u01a1\u0193\3\2\2\2\u01a1\u0194\3\2\2\2\u01a1\u0195\3\2"+ - "\2\2\u01a1\u0196\3\2\2\2\u01a1\u0197\3\2\2\2\u01a1\u0198\3\2\2\2\u01a1"+ - "\u0199\3\2\2\2\u01a1\u019a\3\2\2\2\u01a1\u019b\3\2\2\2\u01a1\u019d\3\2"+ - "\2\2\u01a2\63\3\2\2\2\u01a3\u01a7\58\35\2\u01a4\u01a7\5:\36\2\u01a5\u01a7"+ - "\5<\37\2\u01a6\u01a3\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6\u01a5\3\2\2\2\u01a7"+ - "\65\3\2\2\2\u01a8\u01ab\58\35\2\u01a9\u01ab\5:\36\2\u01aa\u01a8\3\2\2"+ - "\2\u01aa\u01a9\3\2\2\2\u01ab\67\3\2\2\2\u01ac\u01ad\t\16\2\2\u01ad\u01ae"+ - "\7W\2\2\u01ae\u01af\5F$\2\u01af9\3\2\2\2\u01b0\u01b1\t\16\2\2\u01b1\u01b2"+ - "\t\17\2\2\u01b2;\3\2\2\2\u01b3\u01b4\7\7\2\2\u01b4\u01b5\5$\23\2\u01b5"+ - "\u01b6\7\b\2\2\u01b6=\3\2\2\2\u01b7\u01b8\7\30\2\2\u01b8\u01bd\5\34\17"+ - "\2\u01b9\u01ba\7\7\2\2\u01ba\u01bb\5$\23\2\u01bb\u01bc\7\b\2\2\u01bc\u01be"+ - "\3\2\2\2\u01bd\u01b9\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf\u01bd\3\2\2\2\u01bf"+ - "\u01c0\3\2\2\2\u01c0\u01c8\3\2\2\2\u01c1\u01c5\5\66\34\2\u01c2\u01c4\5"+ - "\64\33\2\u01c3\u01c2\3\2\2\2\u01c4\u01c7\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c5"+ - "\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c8\u01c1\3\2"+ - "\2\2\u01c8\u01c9\3\2\2\2\u01c9\u01e1\3\2\2\2\u01ca\u01cb\7\30\2\2\u01cb"+ - "\u01cc\5\34\17\2\u01cc\u01cd\7\7\2\2\u01cd\u01ce\7\b\2\2\u01ce\u01d7\7"+ - "\5\2\2\u01cf\u01d4\5$\23\2\u01d0\u01d1\7\r\2\2\u01d1\u01d3\5$\23\2\u01d2"+ - "\u01d0\3\2\2\2\u01d3\u01d6\3\2\2\2\u01d4\u01d2\3\2\2\2\u01d4\u01d5\3\2"+ - "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d7\u01cf\3\2\2\2\u01d7"+ - "\u01d8\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d9\u01dd\7\6\2\2\u01da\u01dc\5\64"+ - "\33\2\u01db\u01da\3\2\2\2\u01dc\u01df\3\2\2\2\u01dd\u01db\3\2\2\2\u01dd"+ - "\u01de\3\2\2\2\u01de\u01e1\3\2\2\2\u01df\u01dd\3\2\2\2\u01e0\u01b7\3\2"+ - "\2\2\u01e0\u01ca\3\2\2\2\u01e1?\3\2\2\2\u01e2\u01e3\7\7\2\2\u01e3\u01e8"+ - "\5$\23\2\u01e4\u01e5\7\r\2\2\u01e5\u01e7\5$\23\2\u01e6\u01e4\3\2\2\2\u01e7"+ - "\u01ea\3\2\2\2\u01e8\u01e6\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01eb\3\2"+ - "\2\2\u01ea\u01e8\3\2\2\2\u01eb\u01ec\7\b\2\2\u01ec\u01f0\3\2\2\2\u01ed"+ - "\u01ee\7\7\2\2\u01ee\u01f0\7\b\2\2\u01ef\u01e2\3\2\2\2\u01ef\u01ed\3\2"+ - "\2\2\u01f0A\3\2\2\2\u01f1\u01f2\7\7\2\2\u01f2\u01f7\5D#\2\u01f3\u01f4"+ - "\7\r\2\2\u01f4\u01f6\5D#\2\u01f5\u01f3\3\2\2\2\u01f6\u01f9\3\2\2\2\u01f7"+ - "\u01f5\3\2\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01fa\3\2\2\2\u01f9\u01f7\3\2"+ - 
"\2\2\u01fa\u01fb\7\b\2\2\u01fb\u0200\3\2\2\2\u01fc\u01fd\7\7\2\2\u01fd"+ - "\u01fe\7\66\2\2\u01fe\u0200\7\b\2\2\u01ff\u01f1\3\2\2\2\u01ff\u01fc\3"+ - "\2\2\2\u0200C\3\2\2\2\u0201\u0202\5$\23\2\u0202\u0203\7\66\2\2\u0203\u0204"+ - "\5$\23\2\u0204E\3\2\2\2\u0205\u020e\7\t\2\2\u0206\u020b\5H%\2\u0207\u0208"+ - "\7\r\2\2\u0208\u020a\5H%\2\u0209\u0207\3\2\2\2\u020a\u020d\3\2\2\2\u020b"+ - "\u0209\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u020f\3\2\2\2\u020d\u020b\3\2"+ - "\2\2\u020e\u0206\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\3\2\2\2\u0210"+ - "\u0211\7\n\2\2\u0211G\3\2\2\2\u0212\u0216\5$\23\2\u0213\u0216\5J&\2\u0214"+ - "\u0216\5N(\2\u0215\u0212\3\2\2\2\u0215\u0213\3\2\2\2\u0215\u0214\3\2\2"+ - "\2\u0216I\3\2\2\2\u0217\u0225\5L\'\2\u0218\u0221\7\t\2\2\u0219\u021e\5"+ - "L\'\2\u021a\u021b\7\r\2\2\u021b\u021d\5L\'\2\u021c\u021a\3\2\2\2\u021d"+ - "\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0222\3\2"+ - "\2\2\u0220\u021e\3\2\2\2\u0221\u0219\3\2\2\2\u0221\u0222\3\2\2\2\u0222"+ - "\u0223\3\2\2\2\u0223\u0225\7\n\2\2\u0224\u0217\3\2\2\2\u0224\u0218\3\2"+ - "\2\2\u0225\u0226\3\2\2\2\u0226\u0229\79\2\2\u0227\u022a\5\20\t\2\u0228"+ - "\u022a\5$\23\2\u0229\u0227\3\2\2\2\u0229\u0228\3\2\2\2\u022aK\3\2\2\2"+ - "\u022b\u022d\5\32\16\2\u022c\u022b\3\2\2\2\u022c\u022d\3\2\2\2\u022d\u022e"+ - "\3\2\2\2\u022e\u022f\7U\2\2\u022fM\3\2\2\2\u0230\u0231\5\32\16\2\u0231"+ - "\u0232\78\2\2\u0232\u0233\7U\2\2\u0233\u023c\3\2\2\2\u0234\u0235\5\32"+ - "\16\2\u0235\u0236\78\2\2\u0236\u0237\7\30\2\2\u0237\u023c\3\2\2\2\u0238"+ - "\u0239\7\34\2\2\u0239\u023a\78\2\2\u023a\u023c\7U\2\2\u023b\u0230\3\2"+ - "\2\2\u023b\u0234\3\2\2\2\u023b\u0238\3\2\2\2\u023cO\3\2\2\2>SYlow\u0081"+ - "\u0089\u008e\u0092\u0096\u009b\u00b3\u00b5\u00c3\u00c8\u00cc\u00d2\u00d6"+ - "\u00de\u00e8\u00f0\u00fa\u00fd\u0102\u0135\u0137\u0145\u014c\u0155\u0161"+ - "\u016a\u0171\u0178\u017f\u0182\u0188\u018c\u01a1\u01a6\u01aa\u01bf\u01c5"+ - "\u01c8\u01d4\u01d7\u01dd\u01e0\u01e8\u01ef\u01f7\u01ff\u020b\u020e\u0215"+ - "\u021e\u0221\u0224\u0229\u022c\u023b"; - public static final ATN _ATN = - new ATNDeserializer().deserialize(_serializedATN.toCharArray()); - static { - _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; - for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { - _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + public DeclvarContext declvar(int i) { + return getRuleContext(DeclvarContext.class, i); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public DeclarationContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_declaration; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDeclaration(this); + else return visitor.visitChildren(this); + } + } + + public final DeclarationContext declaration() throws RecognitionException { + DeclarationContext _localctx = new DeclarationContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_declaration); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(224); + decltype(); + setState(225); + declvar(); + setState(230); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(226); + match(COMMA); + setState(227); + declvar(); + } + } + setState(232); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } 
catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class DecltypeContext extends ParserRuleContext { + public TypeContext type() { + return getRuleContext(TypeContext.class, 0); + } + + public List LBRACE() { + return getTokens(PainlessParser.LBRACE); + } + + public TerminalNode LBRACE(int i) { + return getToken(PainlessParser.LBRACE, i); + } + + public List RBRACE() { + return getTokens(PainlessParser.RBRACE); + } + + public TerminalNode RBRACE(int i) { + return getToken(PainlessParser.RBRACE, i); + } + + public DecltypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_decltype; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDecltype(this); + else return visitor.visitChildren(this); + } + } + + public final DecltypeContext decltype() throws RecognitionException { + DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); + enterRule(_localctx, 24, RULE_decltype); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(233); + type(); + setState(238); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 20, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(234); + match(LBRACE); + setState(235); + match(RBRACE); + } + } + } + setState(240); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 20, _ctx); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class TypeContext extends ParserRuleContext { + public TerminalNode DEF() { + return getToken(PainlessParser.DEF, 0); + } + + public TerminalNode PRIMITIVE() { + return getToken(PainlessParser.PRIMITIVE, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public List DOT() { + return getTokens(PainlessParser.DOT); + } + + public TerminalNode DOT(int i) { + return getToken(PainlessParser.DOT, i); + } + + public List DOTID() { + return getTokens(PainlessParser.DOTID); + } + + public TerminalNode DOTID(int i) { + return getToken(PainlessParser.DOTID, i); + } + + public TypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_type; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitType(this); + else return visitor.visitChildren(this); + } + } + + public final TypeContext type() throws RecognitionException { + TypeContext _localctx = new TypeContext(_ctx, getState()); + enterRule(_localctx, 26, RULE_type); + try { + int _alt; + setState(251); + switch (_input.LA(1)) { + case DEF: + enterOuterAlt(_localctx, 1); { + setState(241); + match(DEF); + } + break; + case PRIMITIVE: + enterOuterAlt(_localctx, 2); { + setState(242); + match(PRIMITIVE); + } + break; + case ID: + enterOuterAlt(_localctx, 3); { + setState(243); + match(ID); + setState(248); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 21, _ctx); + while (_alt != 2 
&& _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(244); + match(DOT); + setState(245); + match(DOTID); + } + } + } + setState(250); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 21, _ctx); + } + } + break; + default: + throw new NoViableAltException(this); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class DeclvarContext extends ParserRuleContext { + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public TerminalNode ASSIGN() { + return getToken(PainlessParser.ASSIGN, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public DeclvarContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_declvar; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDeclvar(this); + else return visitor.visitChildren(this); + } + } + + public final DeclvarContext declvar() throws RecognitionException { + DeclvarContext _localctx = new DeclvarContext(_ctx, getState()); + enterRule(_localctx, 28, RULE_declvar); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(253); + match(ID); + setState(256); + _la = _input.LA(1); + if (_la == ASSIGN) { + { + setState(254); + match(ASSIGN); + setState(255); + expression(); + } + } + + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class TrapContext extends ParserRuleContext { + public TerminalNode CATCH() { + return getToken(PainlessParser.CATCH, 0); + } + + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public TypeContext type() { + return getRuleContext(TypeContext.class, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } + + public TrapContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_trap; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitTrap(this); + else return visitor.visitChildren(this); + } + } + + public final TrapContext trap() throws RecognitionException { + TrapContext _localctx = new TrapContext(_ctx, getState()); + enterRule(_localctx, 30, RULE_trap); + try { + enterOuterAlt(_localctx, 1); + { + setState(258); + match(CATCH); + setState(259); + match(LP); + setState(260); + type(); + setState(261); + match(ID); + setState(262); + match(RP); + setState(263); + block(); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class NoncondexpressionContext extends ParserRuleContext { + public NoncondexpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, 
invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_noncondexpression; + } + + public NoncondexpressionContext() {} + + public void copyFrom(NoncondexpressionContext ctx) { + super.copyFrom(ctx); + } + } + + public static class SingleContext extends NoncondexpressionContext { + public UnaryContext unary() { + return getRuleContext(UnaryContext.class, 0); + } + + public SingleContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitSingle(this); + else return visitor.visitChildren(this); + } + } + + public static class CompContext extends NoncondexpressionContext { + public List noncondexpression() { + return getRuleContexts(NoncondexpressionContext.class); + } + + public NoncondexpressionContext noncondexpression(int i) { + return getRuleContext(NoncondexpressionContext.class, i); + } + + public TerminalNode LT() { + return getToken(PainlessParser.LT, 0); + } + + public TerminalNode LTE() { + return getToken(PainlessParser.LTE, 0); + } + + public TerminalNode GT() { + return getToken(PainlessParser.GT, 0); + } + + public TerminalNode GTE() { + return getToken(PainlessParser.GTE, 0); + } + + public TerminalNode EQ() { + return getToken(PainlessParser.EQ, 0); + } + + public TerminalNode EQR() { + return getToken(PainlessParser.EQR, 0); + } + + public TerminalNode NE() { + return getToken(PainlessParser.NE, 0); + } + + public TerminalNode NER() { + return getToken(PainlessParser.NER, 0); + } + + public CompContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitComp(this); + else return visitor.visitChildren(this); + } + } + + public static class BoolContext extends NoncondexpressionContext { + public List noncondexpression() { + return getRuleContexts(NoncondexpressionContext.class); + } + + public NoncondexpressionContext noncondexpression(int i) { + return getRuleContext(NoncondexpressionContext.class, i); + } + + public TerminalNode BOOLAND() { + return getToken(PainlessParser.BOOLAND, 0); + } + + public TerminalNode BOOLOR() { + return getToken(PainlessParser.BOOLOR, 0); + } + + public BoolContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitBool(this); + else return visitor.visitChildren(this); + } + } + + public static class BinaryContext extends NoncondexpressionContext { + public List noncondexpression() { + return getRuleContexts(NoncondexpressionContext.class); + } + + public NoncondexpressionContext noncondexpression(int i) { + return getRuleContext(NoncondexpressionContext.class, i); + } + + public TerminalNode MUL() { + return getToken(PainlessParser.MUL, 0); + } + + public TerminalNode DIV() { + return getToken(PainlessParser.DIV, 0); + } + + public TerminalNode REM() { + return getToken(PainlessParser.REM, 0); + } + + public TerminalNode ADD() { + return getToken(PainlessParser.ADD, 0); + } + + public TerminalNode SUB() { + return getToken(PainlessParser.SUB, 0); + } + + public TerminalNode FIND() { + return getToken(PainlessParser.FIND, 0); + } + + public TerminalNode MATCH() { + return getToken(PainlessParser.MATCH, 0); + } + + public TerminalNode LSH() { + return 
getToken(PainlessParser.LSH, 0); + } + + public TerminalNode RSH() { + return getToken(PainlessParser.RSH, 0); + } + + public TerminalNode USH() { + return getToken(PainlessParser.USH, 0); + } + + public TerminalNode BWAND() { + return getToken(PainlessParser.BWAND, 0); + } + + public TerminalNode XOR() { + return getToken(PainlessParser.XOR, 0); + } + + public TerminalNode BWOR() { + return getToken(PainlessParser.BWOR, 0); + } + + public BinaryContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitBinary(this); + else return visitor.visitChildren(this); + } + } + + public static class ElvisContext extends NoncondexpressionContext { + public List noncondexpression() { + return getRuleContexts(NoncondexpressionContext.class); + } + + public NoncondexpressionContext noncondexpression(int i) { + return getRuleContext(NoncondexpressionContext.class, i); + } + + public TerminalNode ELVIS() { + return getToken(PainlessParser.ELVIS, 0); + } + + public ElvisContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitElvis(this); + else return visitor.visitChildren(this); + } + } + + public static class InstanceofContext extends NoncondexpressionContext { + public NoncondexpressionContext noncondexpression() { + return getRuleContext(NoncondexpressionContext.class, 0); + } + + public TerminalNode INSTANCEOF() { + return getToken(PainlessParser.INSTANCEOF, 0); + } + + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public InstanceofContext(NoncondexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitInstanceof(this); + else return visitor.visitChildren(this); + } + } + + public final NoncondexpressionContext noncondexpression() throws RecognitionException { + return noncondexpression(0); + } + + private NoncondexpressionContext noncondexpression(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + NoncondexpressionContext _localctx = new NoncondexpressionContext(_ctx, _parentState); + NoncondexpressionContext _prevctx = _localctx; + int _startState = 32; + enterRecursionRule(_localctx, 32, RULE_noncondexpression, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + { + _localctx = new SingleContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(266); + unary(); + } + _ctx.stop = _input.LT(-1); + setState(309); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 25, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + if (_parseListeners != null) triggerExitRuleEvent(); + _prevctx = _localctx; + { + setState(307); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 24, _ctx)) { + case 1: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(268); + if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); + setState(269); + _la = _input.LA(1); + 
if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(270); + noncondexpression(14); + } + break; + case 2: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(271); + if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); + setState(272); + _la = _input.LA(1); + if (!(_la == ADD || _la == SUB)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(273); + noncondexpression(13); + } + break; + case 3: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(274); + if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); + setState(275); + _la = _input.LA(1); + if (!(_la == FIND || _la == MATCH)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(276); + noncondexpression(12); + } + break; + case 4: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(277); + if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); + setState(278); + _la = _input.LA(1); + if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(279); + noncondexpression(11); + } + break; + case 5: { + _localctx = new CompContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(280); + if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); + setState(281); + _la = _input.LA(1); + if (!((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(282); + noncondexpression(10); + } + break; + case 6: { + _localctx = new CompContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(283); + if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); + setState(284); + _la = _input.LA(1); + if (!((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(285); + noncondexpression(8); + } + break; + case 7: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(286); + if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); + setState(287); + match(BWAND); + setState(288); + noncondexpression(7); + } + break; + case 8: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(289); + if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); + setState(290); + match(XOR); + setState(291); + 
noncondexpression(6); + } + break; + case 9: { + _localctx = new BinaryContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(292); + if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); + setState(293); + match(BWOR); + setState(294); + noncondexpression(5); + } + break; + case 10: { + _localctx = new BoolContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(295); + if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + setState(296); + match(BOOLAND); + setState(297); + noncondexpression(4); + } + break; + case 11: { + _localctx = new BoolContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(298); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(299); + match(BOOLOR); + setState(300); + noncondexpression(3); + } + break; + case 12: { + _localctx = new ElvisContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(301); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(302); + match(ELVIS); + setState(303); + noncondexpression(1); + } + break; + case 13: { + _localctx = new InstanceofContext(new NoncondexpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_noncondexpression); + setState(304); + if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); + setState(305); + match(INSTANCEOF); + setState(306); + decltype(); + } + break; + } + } + } + setState(311); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 25, _ctx); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + unrollRecursionContexts(_parentctx); + } + return _localctx; + } + + public static class ExpressionContext extends ParserRuleContext { + public ExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_expression; + } + + public ExpressionContext() {} + + public void copyFrom(ExpressionContext ctx) { + super.copyFrom(ctx); + } + } + + public static class ConditionalContext extends ExpressionContext { + public NoncondexpressionContext noncondexpression() { + return getRuleContext(NoncondexpressionContext.class, 0); + } + + public TerminalNode COND() { + return getToken(PainlessParser.COND, 0); + } + + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class, i); + } + + public TerminalNode COLON() { + return getToken(PainlessParser.COLON, 0); + } + + public ConditionalContext(ExpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitConditional(this); + else return visitor.visitChildren(this); + } + } + + public static class AssignmentContext extends ExpressionContext { + public NoncondexpressionContext 
noncondexpression() { + return getRuleContext(NoncondexpressionContext.class, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public TerminalNode ASSIGN() { + return getToken(PainlessParser.ASSIGN, 0); + } + + public TerminalNode AADD() { + return getToken(PainlessParser.AADD, 0); + } + + public TerminalNode ASUB() { + return getToken(PainlessParser.ASUB, 0); + } + + public TerminalNode AMUL() { + return getToken(PainlessParser.AMUL, 0); + } + + public TerminalNode ADIV() { + return getToken(PainlessParser.ADIV, 0); + } + + public TerminalNode AREM() { + return getToken(PainlessParser.AREM, 0); + } + + public TerminalNode AAND() { + return getToken(PainlessParser.AAND, 0); + } + + public TerminalNode AXOR() { + return getToken(PainlessParser.AXOR, 0); + } + + public TerminalNode AOR() { + return getToken(PainlessParser.AOR, 0); + } + + public TerminalNode ALSH() { + return getToken(PainlessParser.ALSH, 0); + } + + public TerminalNode ARSH() { + return getToken(PainlessParser.ARSH, 0); + } + + public TerminalNode AUSH() { + return getToken(PainlessParser.AUSH, 0); + } + + public AssignmentContext(ExpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitAssignment(this); + else return visitor.visitChildren(this); + } + } + + public static class NonconditionalContext extends ExpressionContext { + public NoncondexpressionContext noncondexpression() { + return getRuleContext(NoncondexpressionContext.class, 0); + } + + public NonconditionalContext(ExpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNonconditional(this); + else return visitor.visitChildren(this); + } + } + + public final ExpressionContext expression() throws RecognitionException { + ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); + enterRule(_localctx, 34, RULE_expression); + int _la; + try { + setState(323); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 26, _ctx)) { + case 1: + _localctx = new NonconditionalContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(312); + noncondexpression(0); + } + break; + case 2: + _localctx = new ConditionalContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(313); + noncondexpression(0); + setState(314); + match(COND); + setState(315); + expression(); + setState(316); + match(COLON); + setState(317); + expression(); + } + break; + case 3: + _localctx = new AssignmentContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(319); + noncondexpression(0); + setState(320); + _la = _input.LA(1); + if (!(((((_la - 60)) & ~0x3f) == 0 + && ((1L << (_la - 60)) & ((1L << (ASSIGN - 60)) | (1L << (AADD - 60)) | (1L << (ASUB - 60)) | (1L << (AMUL - 60)) + | (1L << (ADIV - 60)) | (1L << (AREM - 60)) | (1L << (AAND - 60)) | (1L << (AXOR - 60)) | (1L << (AOR - 60)) + | (1L << (ALSH - 60)) | (1L << (ARSH - 60)) | (1L << (AUSH - 60)))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(321); + expression(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class UnaryContext 
extends ParserRuleContext { + public UnaryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_unary; + } + + public UnaryContext() {} + + public void copyFrom(UnaryContext ctx) { + super.copyFrom(ctx); + } + } + + public static class NotaddsubContext extends UnaryContext { + public UnarynotaddsubContext unarynotaddsub() { + return getRuleContext(UnarynotaddsubContext.class, 0); + } + + public NotaddsubContext(UnaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNotaddsub(this); + else return visitor.visitChildren(this); + } + } + + public static class PreContext extends UnaryContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class, 0); + } + + public TerminalNode INCR() { + return getToken(PainlessParser.INCR, 0); + } + + public TerminalNode DECR() { + return getToken(PainlessParser.DECR, 0); + } + + public PreContext(UnaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPre(this); + else return visitor.visitChildren(this); + } + } + + public static class AddsubContext extends UnaryContext { + public UnaryContext unary() { + return getRuleContext(UnaryContext.class, 0); + } + + public TerminalNode ADD() { + return getToken(PainlessParser.ADD, 0); + } + + public TerminalNode SUB() { + return getToken(PainlessParser.SUB, 0); + } + + public AddsubContext(UnaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitAddsub(this); + else return visitor.visitChildren(this); + } + } + + public final UnaryContext unary() throws RecognitionException { + UnaryContext _localctx = new UnaryContext(_ctx, getState()); + enterRule(_localctx, 36, RULE_unary); + int _la; + try { + setState(330); + switch (_input.LA(1)) { + case INCR: + case DECR: + _localctx = new PreContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(325); + _la = _input.LA(1); + if (!(_la == INCR || _la == DECR)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(326); + chain(); + } + break; + case ADD: + case SUB: + _localctx = new AddsubContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(327); + _la = _input.LA(1); + if (!(_la == ADD || _la == SUB)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(328); + unary(); + } + break; + case LBRACE: + case LP: + case NEW: + case BOOLNOT: + case BWNOT: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case ID: + _localctx = new NotaddsubContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(329); + unarynotaddsub(); + } + break; + default: + throw new NoViableAltException(this); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class UnarynotaddsubContext extends ParserRuleContext { + public UnarynotaddsubContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + 
return RULE_unarynotaddsub; + } + + public UnarynotaddsubContext() {} + + public void copyFrom(UnarynotaddsubContext ctx) { + super.copyFrom(ctx); + } + } + + public static class CastContext extends UnarynotaddsubContext { + public CastexpressionContext castexpression() { + return getRuleContext(CastexpressionContext.class, 0); + } + + public CastContext(UnarynotaddsubContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitCast(this); + else return visitor.visitChildren(this); + } + } + + public static class NotContext extends UnarynotaddsubContext { + public UnaryContext unary() { + return getRuleContext(UnaryContext.class, 0); + } + + public TerminalNode BOOLNOT() { + return getToken(PainlessParser.BOOLNOT, 0); + } + + public TerminalNode BWNOT() { + return getToken(PainlessParser.BWNOT, 0); + } + + public NotContext(UnarynotaddsubContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNot(this); + else return visitor.visitChildren(this); + } + } + + public static class ReadContext extends UnarynotaddsubContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class, 0); + } + + public ReadContext(UnarynotaddsubContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitRead(this); + else return visitor.visitChildren(this); + } + } + + public static class PostContext extends UnarynotaddsubContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class, 0); + } + + public TerminalNode INCR() { + return getToken(PainlessParser.INCR, 0); + } + + public TerminalNode DECR() { + return getToken(PainlessParser.DECR, 0); + } + + public PostContext(UnarynotaddsubContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPost(this); + else return visitor.visitChildren(this); + } + } + + public final UnarynotaddsubContext unarynotaddsub() throws RecognitionException { + UnarynotaddsubContext _localctx = new UnarynotaddsubContext(_ctx, getState()); + enterRule(_localctx, 38, RULE_unarynotaddsub); + int _la; + try { + setState(339); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 28, _ctx)) { + case 1: + _localctx = new ReadContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(332); + chain(); + } + break; + case 2: + _localctx = new PostContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(333); + chain(); + setState(334); + _la = _input.LA(1); + if (!(_la == INCR || _la == DECR)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + break; + case 3: + _localctx = new NotContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(336); + _la = _input.LA(1); + if (!(_la == BOOLNOT || _la == BWNOT)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(337); + unary(); + } + break; + case 4: + _localctx = new CastContext(_localctx); + enterOuterAlt(_localctx, 4); { + setState(338); + castexpression(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); 
+ } finally { + exitRule(); + } + return _localctx; + } + + public static class CastexpressionContext extends ParserRuleContext { + public CastexpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_castexpression; + } + + public CastexpressionContext() {} + + public void copyFrom(CastexpressionContext ctx) { + super.copyFrom(ctx); + } + } + + public static class RefcastContext extends CastexpressionContext { + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public RefcasttypeContext refcasttype() { + return getRuleContext(RefcasttypeContext.class, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public UnarynotaddsubContext unarynotaddsub() { + return getRuleContext(UnarynotaddsubContext.class, 0); + } + + public RefcastContext(CastexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitRefcast(this); + else return visitor.visitChildren(this); + } + } + + public static class PrimordefcastContext extends CastexpressionContext { + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public PrimordefcasttypeContext primordefcasttype() { + return getRuleContext(PrimordefcasttypeContext.class, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public UnaryContext unary() { + return getRuleContext(UnaryContext.class, 0); + } + + public PrimordefcastContext(CastexpressionContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPrimordefcast(this); + else return visitor.visitChildren(this); + } + } + + public final CastexpressionContext castexpression() throws RecognitionException { + CastexpressionContext _localctx = new CastexpressionContext(_ctx, getState()); + enterRule(_localctx, 40, RULE_castexpression); + try { + setState(351); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 29, _ctx)) { + case 1: + _localctx = new PrimordefcastContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(341); + match(LP); + setState(342); + primordefcasttype(); + setState(343); + match(RP); + setState(344); + unary(); + } + break; + case 2: + _localctx = new RefcastContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(346); + match(LP); + setState(347); + refcasttype(); + setState(348); + match(RP); + setState(349); + unarynotaddsub(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class PrimordefcasttypeContext extends ParserRuleContext { + public TerminalNode DEF() { + return getToken(PainlessParser.DEF, 0); + } + + public TerminalNode PRIMITIVE() { + return getToken(PainlessParser.PRIMITIVE, 0); + } + + public PrimordefcasttypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_primordefcasttype; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPrimordefcasttype( + this + ); + else 
return visitor.visitChildren(this); + } + } + + public final PrimordefcasttypeContext primordefcasttype() throws RecognitionException { + PrimordefcasttypeContext _localctx = new PrimordefcasttypeContext(_ctx, getState()); + enterRule(_localctx, 42, RULE_primordefcasttype); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(353); + _la = _input.LA(1); + if (!(_la == PRIMITIVE || _la == DEF)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class RefcasttypeContext extends ParserRuleContext { + public TerminalNode DEF() { + return getToken(PainlessParser.DEF, 0); + } + + public List LBRACE() { + return getTokens(PainlessParser.LBRACE); + } + + public TerminalNode LBRACE(int i) { + return getToken(PainlessParser.LBRACE, i); + } + + public List RBRACE() { + return getTokens(PainlessParser.RBRACE); + } + + public TerminalNode RBRACE(int i) { + return getToken(PainlessParser.RBRACE, i); + } + + public TerminalNode PRIMITIVE() { + return getToken(PainlessParser.PRIMITIVE, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public List DOT() { + return getTokens(PainlessParser.DOT); + } + + public TerminalNode DOT(int i) { + return getToken(PainlessParser.DOT, i); + } + + public List DOTID() { + return getTokens(PainlessParser.DOTID); + } + + public TerminalNode DOTID(int i) { + return getToken(PainlessParser.DOTID, i); + } + + public RefcasttypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_refcasttype; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitRefcasttype(this); + else return visitor.visitChildren(this); + } + } + + public final RefcasttypeContext refcasttype() throws RecognitionException { + RefcasttypeContext _localctx = new RefcasttypeContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_refcasttype); + int _la; + try { + setState(384); + switch (_input.LA(1)) { + case DEF: + enterOuterAlt(_localctx, 1); { + setState(355); + match(DEF); + setState(358); + _errHandler.sync(this); + _la = _input.LA(1); + do { + { + { + setState(356); + match(LBRACE); + setState(357); + match(RBRACE); + } + } + setState(360); + _errHandler.sync(this); + _la = _input.LA(1); + } while (_la == LBRACE); + } + break; + case PRIMITIVE: + enterOuterAlt(_localctx, 2); { + setState(362); + match(PRIMITIVE); + setState(365); + _errHandler.sync(this); + _la = _input.LA(1); + do { + { + { + setState(363); + match(LBRACE); + setState(364); + match(RBRACE); + } + } + setState(367); + _errHandler.sync(this); + _la = _input.LA(1); + } while (_la == LBRACE); + } + break; + case ID: + enterOuterAlt(_localctx, 3); { + setState(369); + match(ID); + setState(374); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == DOT) { + { + { + setState(370); + match(DOT); + setState(371); + match(DOTID); + } + } + setState(376); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(381); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == LBRACE) { + { + { + setState(377); + match(LBRACE); + setState(378); + match(RBRACE); + } + } + setState(383); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + break; + 
default: + throw new NoViableAltException(this); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class ChainContext extends ParserRuleContext { + public ChainContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_chain; + } + + public ChainContext() {} + + public void copyFrom(ChainContext ctx) { + super.copyFrom(ctx); + } + } + + public static class DynamicContext extends ChainContext { + public PrimaryContext primary() { + return getRuleContext(PrimaryContext.class, 0); + } + + public List postfix() { + return getRuleContexts(PostfixContext.class); + } + + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class, i); + } + + public DynamicContext(ChainContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitDynamic(this); + else return visitor.visitChildren(this); + } + } + + public static class NewarrayContext extends ChainContext { + public ArrayinitializerContext arrayinitializer() { + return getRuleContext(ArrayinitializerContext.class, 0); + } + + public NewarrayContext(ChainContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNewarray(this); + else return visitor.visitChildren(this); + } + } + + public final ChainContext chain() throws RecognitionException { + ChainContext _localctx = new ChainContext(_ctx, getState()); + enterRule(_localctx, 46, RULE_chain); + try { + int _alt; + setState(394); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 36, _ctx)) { + case 1: + _localctx = new DynamicContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(386); + primary(); + setState(390); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 35, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(387); + postfix(); + } + } + } + setState(392); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 35, _ctx); + } + } + break; + case 2: + _localctx = new NewarrayContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(393); + arrayinitializer(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class PrimaryContext extends ParserRuleContext { + public PrimaryContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_primary; + } + + public PrimaryContext() {} + + public void copyFrom(PrimaryContext ctx) { + super.copyFrom(ctx); + } + } + + public static class ListinitContext extends PrimaryContext { + public ListinitializerContext listinitializer() { + return getRuleContext(ListinitializerContext.class, 0); + } + + public ListinitContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) 
visitor).visitListinit(this); + else return visitor.visitChildren(this); + } + } + + public static class RegexContext extends PrimaryContext { + public TerminalNode REGEX() { + return getToken(PainlessParser.REGEX, 0); + } + + public RegexContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitRegex(this); + else return visitor.visitChildren(this); + } + } + + public static class NullContext extends PrimaryContext { + public TerminalNode NULL() { + return getToken(PainlessParser.NULL, 0); + } + + public NullContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNull(this); + else return visitor.visitChildren(this); + } + } + + public static class StringContext extends PrimaryContext { + public TerminalNode STRING() { + return getToken(PainlessParser.STRING, 0); + } + + public StringContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitString(this); + else return visitor.visitChildren(this); + } + } + + public static class MapinitContext extends PrimaryContext { + public MapinitializerContext mapinitializer() { + return getRuleContext(MapinitializerContext.class, 0); + } + + public MapinitContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitMapinit(this); + else return visitor.visitChildren(this); + } + } + + public static class CalllocalContext extends PrimaryContext { + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class, 0); + } + + public CalllocalContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitCalllocal(this); + else return visitor.visitChildren(this); + } + } + + public static class TrueContext extends PrimaryContext { + public TerminalNode TRUE() { + return getToken(PainlessParser.TRUE, 0); + } + + public TrueContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitTrue(this); + else return visitor.visitChildren(this); + } + } + + public static class FalseContext extends PrimaryContext { + public TerminalNode FALSE() { + return getToken(PainlessParser.FALSE, 0); + } + + public FalseContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitFalse(this); + else return visitor.visitChildren(this); + } + } + + public static class VariableContext extends PrimaryContext { + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public VariableContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) 
visitor).visitVariable(this); + else return visitor.visitChildren(this); + } + } + + public static class NumericContext extends PrimaryContext { + public TerminalNode OCTAL() { + return getToken(PainlessParser.OCTAL, 0); + } + + public TerminalNode HEX() { + return getToken(PainlessParser.HEX, 0); + } + + public TerminalNode INTEGER() { + return getToken(PainlessParser.INTEGER, 0); + } + + public TerminalNode DECIMAL() { + return getToken(PainlessParser.DECIMAL, 0); + } + + public NumericContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNumeric(this); + else return visitor.visitChildren(this); + } + } + + public static class NewobjectContext extends PrimaryContext { + public TerminalNode NEW() { + return getToken(PainlessParser.NEW, 0); + } + + public TypeContext type() { + return getRuleContext(TypeContext.class, 0); + } + + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class, 0); + } + + public NewobjectContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNewobject(this); + else return visitor.visitChildren(this); + } + } + + public static class PrecedenceContext extends PrimaryContext { + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public PrecedenceContext(PrimaryContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPrecedence(this); + else return visitor.visitChildren(this); + } + } + + public final PrimaryContext primary() throws RecognitionException { + PrimaryContext _localctx = new PrimaryContext(_ctx, getState()); + enterRule(_localctx, 48, RULE_primary); + int _la; + try { + setState(415); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 37, _ctx)) { + case 1: + _localctx = new PrecedenceContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(396); + match(LP); + setState(397); + expression(); + setState(398); + match(RP); + } + break; + case 2: + _localctx = new NumericContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(400); + _la = _input.LA(1); + if (!(((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)))) != 0))) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + break; + case 3: + _localctx = new TrueContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(401); + match(TRUE); + } + break; + case 4: + _localctx = new FalseContext(_localctx); + enterOuterAlt(_localctx, 4); { + setState(402); + match(FALSE); + } + break; + case 5: + _localctx = new NullContext(_localctx); + enterOuterAlt(_localctx, 5); { + setState(403); + match(NULL); + } + break; + case 6: + _localctx = new StringContext(_localctx); + enterOuterAlt(_localctx, 6); { + setState(404); + match(STRING); + } + break; + case 7: + _localctx = new RegexContext(_localctx); + enterOuterAlt(_localctx, 7); { + setState(405); + match(REGEX); + } + break; + case 8: + _localctx = 
new ListinitContext(_localctx); + enterOuterAlt(_localctx, 8); { + setState(406); + listinitializer(); + } + break; + case 9: + _localctx = new MapinitContext(_localctx); + enterOuterAlt(_localctx, 9); { + setState(407); + mapinitializer(); + } + break; + case 10: + _localctx = new VariableContext(_localctx); + enterOuterAlt(_localctx, 10); { + setState(408); + match(ID); + } + break; + case 11: + _localctx = new CalllocalContext(_localctx); + enterOuterAlt(_localctx, 11); { + setState(409); + match(ID); + setState(410); + arguments(); + } + break; + case 12: + _localctx = new NewobjectContext(_localctx); + enterOuterAlt(_localctx, 12); { + setState(411); + match(NEW); + setState(412); + type(); + setState(413); + arguments(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class PostfixContext extends ParserRuleContext { + public CallinvokeContext callinvoke() { + return getRuleContext(CallinvokeContext.class, 0); + } + + public FieldaccessContext fieldaccess() { + return getRuleContext(FieldaccessContext.class, 0); + } + + public BraceaccessContext braceaccess() { + return getRuleContext(BraceaccessContext.class, 0); + } + + public PostfixContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_postfix; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPostfix(this); + else return visitor.visitChildren(this); + } + } + + public final PostfixContext postfix() throws RecognitionException { + PostfixContext _localctx = new PostfixContext(_ctx, getState()); + enterRule(_localctx, 50, RULE_postfix); + try { + setState(420); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 38, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(417); + callinvoke(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); { + setState(418); + fieldaccess(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); { + setState(419); + braceaccess(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class PostdotContext extends ParserRuleContext { + public CallinvokeContext callinvoke() { + return getRuleContext(CallinvokeContext.class, 0); + } + + public FieldaccessContext fieldaccess() { + return getRuleContext(FieldaccessContext.class, 0); + } + + public PostdotContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_postdot; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitPostdot(this); + else return visitor.visitChildren(this); + } + } + + public final PostdotContext postdot() throws RecognitionException { + PostdotContext _localctx = new PostdotContext(_ctx, getState()); + enterRule(_localctx, 52, RULE_postdot); + try { + setState(424); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 39, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(422); + callinvoke(); + } + break; + case 2: + 
enterOuterAlt(_localctx, 2); { + setState(423); + fieldaccess(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class CallinvokeContext extends ParserRuleContext { + public TerminalNode DOTID() { + return getToken(PainlessParser.DOTID, 0); + } + + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class, 0); + } + + public TerminalNode DOT() { + return getToken(PainlessParser.DOT, 0); + } + + public TerminalNode NSDOT() { + return getToken(PainlessParser.NSDOT, 0); + } + + public CallinvokeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_callinvoke; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitCallinvoke(this); + else return visitor.visitChildren(this); + } + } + + public final CallinvokeContext callinvoke() throws RecognitionException { + CallinvokeContext _localctx = new CallinvokeContext(_ctx, getState()); + enterRule(_localctx, 54, RULE_callinvoke); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(426); + _la = _input.LA(1); + if (!(_la == DOT || _la == NSDOT)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(427); + match(DOTID); + setState(428); + arguments(); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class FieldaccessContext extends ParserRuleContext { + public TerminalNode DOT() { + return getToken(PainlessParser.DOT, 0); + } + + public TerminalNode NSDOT() { + return getToken(PainlessParser.NSDOT, 0); + } + + public TerminalNode DOTID() { + return getToken(PainlessParser.DOTID, 0); + } + + public TerminalNode DOTINTEGER() { + return getToken(PainlessParser.DOTINTEGER, 0); + } + + public FieldaccessContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_fieldaccess; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitFieldaccess(this); + else return visitor.visitChildren(this); + } + } + + public final FieldaccessContext fieldaccess() throws RecognitionException { + FieldaccessContext _localctx = new FieldaccessContext(_ctx, getState()); + enterRule(_localctx, 56, RULE_fieldaccess); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(430); + _la = _input.LA(1); + if (!(_la == DOT || _la == NSDOT)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(431); + _la = _input.LA(1); + if (!(_la == DOTINTEGER || _la == DOTID)) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class BraceaccessContext extends ParserRuleContext { + public TerminalNode LBRACE() { + return getToken(PainlessParser.LBRACE, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + 
+ public TerminalNode RBRACE() { + return getToken(PainlessParser.RBRACE, 0); + } + + public BraceaccessContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_braceaccess; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitBraceaccess(this); + else return visitor.visitChildren(this); + } + } + + public final BraceaccessContext braceaccess() throws RecognitionException { + BraceaccessContext _localctx = new BraceaccessContext(_ctx, getState()); + enterRule(_localctx, 58, RULE_braceaccess); + try { + enterOuterAlt(_localctx, 1); + { + setState(433); + match(LBRACE); + setState(434); + expression(); + setState(435); + match(RBRACE); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class ArrayinitializerContext extends ParserRuleContext { + public ArrayinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_arrayinitializer; + } + + public ArrayinitializerContext() {} + + public void copyFrom(ArrayinitializerContext ctx) { + super.copyFrom(ctx); + } + } + + public static class NewstandardarrayContext extends ArrayinitializerContext { + public TerminalNode NEW() { + return getToken(PainlessParser.NEW, 0); + } + + public TypeContext type() { + return getRuleContext(TypeContext.class, 0); + } + + public List LBRACE() { + return getTokens(PainlessParser.LBRACE); + } + + public TerminalNode LBRACE(int i) { + return getToken(PainlessParser.LBRACE, i); + } + + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class, i); + } + + public List RBRACE() { + return getTokens(PainlessParser.RBRACE); + } + + public TerminalNode RBRACE(int i) { + return getToken(PainlessParser.RBRACE, i); + } + + public PostdotContext postdot() { + return getRuleContext(PostdotContext.class, 0); + } + + public List postfix() { + return getRuleContexts(PostfixContext.class); + } + + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class, i); + } + + public NewstandardarrayContext(ArrayinitializerContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNewstandardarray(this); + else return visitor.visitChildren(this); + } + } + + public static class NewinitializedarrayContext extends ArrayinitializerContext { + public TerminalNode NEW() { + return getToken(PainlessParser.NEW, 0); + } + + public TypeContext type() { + return getRuleContext(TypeContext.class, 0); + } + + public TerminalNode LBRACE() { + return getToken(PainlessParser.LBRACE, 0); + } + + public TerminalNode RBRACE() { + return getToken(PainlessParser.RBRACE, 0); + } + + public TerminalNode LBRACK() { + return getToken(PainlessParser.LBRACK, 0); + } + + public TerminalNode RBRACK() { + return getToken(PainlessParser.RBRACK, 0); + } + + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class, i); + } + + 
public List postfix() { + return getRuleContexts(PostfixContext.class); + } + + public PostfixContext postfix(int i) { + return getRuleContext(PostfixContext.class, i); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public NewinitializedarrayContext(ArrayinitializerContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitNewinitializedarray( + this + ); + else return visitor.visitChildren(this); + } + } + + public final ArrayinitializerContext arrayinitializer() throws RecognitionException { + ArrayinitializerContext _localctx = new ArrayinitializerContext(_ctx, getState()); + enterRule(_localctx, 60, RULE_arrayinitializer); + int _la; + try { + int _alt; + setState(478); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 46, _ctx)) { + case 1: + _localctx = new NewstandardarrayContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(437); + match(NEW); + setState(438); + type(); + setState(443); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: { + { + setState(439); + match(LBRACE); + setState(440); + expression(); + setState(441); + match(RBRACE); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(445); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 40, _ctx); + } while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER); + setState(454); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 42, _ctx)) { + case 1: { + setState(447); + postdot(); + setState(451); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 41, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(448); + postfix(); + } + } + } + setState(453); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 41, _ctx); + } + } + break; + } + } + break; + case 2: + _localctx = new NewinitializedarrayContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(456); + match(NEW); + setState(457); + type(); + setState(458); + match(LBRACE); + setState(459); + match(RBRACE); + setState(460); + match(LBRACK); + setState(469); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L + << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(461); + expression(); + setState(466); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(462); + match(COMMA); + setState(463); + expression(); + } + } + setState(468); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(471); + match(RBRACK); + setState(475); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 45, _ctx); + while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { + if (_alt == 1) { + { + { + setState(472); + postfix(); + } + } + } + 
setState(477); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input, 45, _ctx); + } + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class ListinitializerContext extends ParserRuleContext { + public TerminalNode LBRACE() { + return getToken(PainlessParser.LBRACE, 0); + } + + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class, i); + } + + public TerminalNode RBRACE() { + return getToken(PainlessParser.RBRACE, 0); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public ListinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_listinitializer; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitListinitializer(this); + else return visitor.visitChildren(this); + } + } + + public final ListinitializerContext listinitializer() throws RecognitionException { + ListinitializerContext _localctx = new ListinitializerContext(_ctx, getState()); + enterRule(_localctx, 62, RULE_listinitializer); + int _la; + try { + setState(493); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 48, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(480); + match(LBRACE); + setState(481); + expression(); + setState(486); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(482); + match(COMMA); + setState(483); + expression(); + } + } + setState(488); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(489); + match(RBRACE); + } + break; + case 2: + enterOuterAlt(_localctx, 2); { + setState(491); + match(LBRACE); + setState(492); + match(RBRACE); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class MapinitializerContext extends ParserRuleContext { + public TerminalNode LBRACE() { + return getToken(PainlessParser.LBRACE, 0); + } + + public List maptoken() { + return getRuleContexts(MaptokenContext.class); + } + + public MaptokenContext maptoken(int i) { + return getRuleContext(MaptokenContext.class, i); + } + + public TerminalNode RBRACE() { + return getToken(PainlessParser.RBRACE, 0); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public TerminalNode COLON() { + return getToken(PainlessParser.COLON, 0); + } + + public MapinitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_mapinitializer; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitMapinitializer(this); + else return visitor.visitChildren(this); + } + } + + public final MapinitializerContext mapinitializer() throws 
RecognitionException { + MapinitializerContext _localctx = new MapinitializerContext(_ctx, getState()); + enterRule(_localctx, 64, RULE_mapinitializer); + int _la; + try { + setState(509); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 50, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(495); + match(LBRACE); + setState(496); + maptoken(); + setState(501); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(497); + match(COMMA); + setState(498); + maptoken(); + } + } + setState(503); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(504); + match(RBRACE); + } + break; + case 2: + enterOuterAlt(_localctx, 2); { + setState(506); + match(LBRACE); + setState(507); + match(COLON); + setState(508); + match(RBRACE); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class MaptokenContext extends ParserRuleContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class, i); + } + + public TerminalNode COLON() { + return getToken(PainlessParser.COLON, 0); + } + + public MaptokenContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_maptoken; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitMaptoken(this); + else return visitor.visitChildren(this); + } + } + + public final MaptokenContext maptoken() throws RecognitionException { + MaptokenContext _localctx = new MaptokenContext(_ctx, getState()); + enterRule(_localctx, 66, RULE_maptoken); + try { + enterOuterAlt(_localctx, 1); + { + setState(511); + expression(); + setState(512); + match(COLON); + setState(513); + expression(); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class ArgumentsContext extends ParserRuleContext { + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public List argument() { + return getRuleContexts(ArgumentContext.class); + } + + public ArgumentContext argument(int i) { + return getRuleContext(ArgumentContext.class, i); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public ArgumentsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_arguments; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitArguments(this); + else return visitor.visitChildren(this); + } + } + + public final ArgumentsContext arguments() throws RecognitionException { + ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); + enterRule(_localctx, 68, RULE_arguments); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + { + setState(515); + match(LP); + setState(524); 
+ _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 + && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L + << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) + || ((((_la - 72)) & ~0x3f) == 0 + && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL + - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L + << (NULL - 72)) | (1L << (PRIMITIVE - 72)) | (1L << (DEF - 72)) | (1L << (ID - 72)))) != 0)) { + { + setState(516); + argument(); + setState(521); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(517); + match(COMMA); + setState(518); + argument(); + } + } + setState(523); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(526); + match(RP); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class ArgumentContext extends ParserRuleContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public LambdaContext lambda() { + return getRuleContext(LambdaContext.class, 0); + } + + public FuncrefContext funcref() { + return getRuleContext(FuncrefContext.class, 0); + } + + public ArgumentContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_argument; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitArgument(this); + else return visitor.visitChildren(this); + } + } + + public final ArgumentContext argument() throws RecognitionException { + ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); + enterRule(_localctx, 70, RULE_argument); + try { + setState(531); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 53, _ctx)) { + case 1: + enterOuterAlt(_localctx, 1); { + setState(528); + expression(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); { + setState(529); + lambda(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); { + setState(530); + funcref(); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class LambdaContext extends ParserRuleContext { + public TerminalNode ARROW() { + return getToken(PainlessParser.ARROW, 0); + } + + public List lamtype() { + return getRuleContexts(LamtypeContext.class); + } + + public LamtypeContext lamtype(int i) { + return getRuleContext(LamtypeContext.class, i); + } + + public TerminalNode LP() { + return getToken(PainlessParser.LP, 0); + } + + public TerminalNode RP() { + return getToken(PainlessParser.RP, 0); + } + + public BlockContext block() { + return getRuleContext(BlockContext.class, 0); + } + + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class, 0); + } + + public List COMMA() { + return getTokens(PainlessParser.COMMA); + } + + public TerminalNode COMMA(int i) { + return getToken(PainlessParser.COMMA, i); + } + + public LambdaContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public 
int getRuleIndex() { + return RULE_lambda; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitLambda(this); + else return visitor.visitChildren(this); + } + } + + public final LambdaContext lambda() throws RecognitionException { + LambdaContext _localctx = new LambdaContext(_ctx, getState()); + enterRule(_localctx, 72, RULE_lambda); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(546); + switch (_input.LA(1)) { + case PRIMITIVE: + case DEF: + case ID: { + setState(533); + lamtype(); + } + break; + case LP: { + setState(534); + match(LP); + setState(543); + _la = _input.LA(1); + if (((((_la - 81)) & ~0x3f) == 0 + && ((1L << (_la - 81)) & ((1L << (PRIMITIVE - 81)) | (1L << (DEF - 81)) | (1L << (ID - 81)))) != 0)) { + { + setState(535); + lamtype(); + setState(540); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la == COMMA) { + { + { + setState(536); + match(COMMA); + setState(537); + lamtype(); + } + } + setState(542); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(545); + match(RP); + } + break; + default: + throw new NoViableAltException(this); + } + setState(548); + match(ARROW); + setState(551); + switch (_input.LA(1)) { + case LBRACK: { + setState(549); + block(); + } + break; + case LBRACE: + case LP: + case NEW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case REGEX: + case TRUE: + case FALSE: + case NULL: + case ID: { + setState(550); + expression(); + } + break; + default: + throw new NoViableAltException(this); + } + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class LamtypeContext extends ParserRuleContext { + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public LamtypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_lamtype; + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitLamtype(this); + else return visitor.visitChildren(this); + } + } + + public final LamtypeContext lamtype() throws RecognitionException { + LamtypeContext _localctx = new LamtypeContext(_ctx, getState()); + enterRule(_localctx, 74, RULE_lamtype); + try { + enterOuterAlt(_localctx, 1); + { + setState(554); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 58, _ctx)) { + case 1: { + setState(553); + decltype(); + } + break; + } + setState(556); + match(ID); + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public static class FuncrefContext extends ParserRuleContext { + public FuncrefContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + + @Override + public int getRuleIndex() { + return RULE_funcref; + } + + public FuncrefContext() {} + + public void copyFrom(FuncrefContext ctx) { + super.copyFrom(ctx); + } + } + + public static class 
ClassfuncrefContext extends FuncrefContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public TerminalNode REF() { + return getToken(PainlessParser.REF, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public ClassfuncrefContext(FuncrefContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitClassfuncref(this); + else return visitor.visitChildren(this); + } + } + + public static class ConstructorfuncrefContext extends FuncrefContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class, 0); + } + + public TerminalNode REF() { + return getToken(PainlessParser.REF, 0); + } + + public TerminalNode NEW() { + return getToken(PainlessParser.NEW, 0); + } + + public ConstructorfuncrefContext(FuncrefContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitConstructorfuncref( + this + ); + else return visitor.visitChildren(this); + } + } + + public static class LocalfuncrefContext extends FuncrefContext { + public TerminalNode THIS() { + return getToken(PainlessParser.THIS, 0); + } + + public TerminalNode REF() { + return getToken(PainlessParser.REF, 0); + } + + public TerminalNode ID() { + return getToken(PainlessParser.ID, 0); + } + + public LocalfuncrefContext(FuncrefContext ctx) { + copyFrom(ctx); + } + + @Override + public T accept(ParseTreeVisitor visitor) { + if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor) visitor).visitLocalfuncref(this); + else return visitor.visitChildren(this); + } + } + + public final FuncrefContext funcref() throws RecognitionException { + FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); + enterRule(_localctx, 76, RULE_funcref); + try { + setState(569); + _errHandler.sync(this); + switch (getInterpreter().adaptivePredict(_input, 59, _ctx)) { + case 1: + _localctx = new ClassfuncrefContext(_localctx); + enterOuterAlt(_localctx, 1); { + setState(558); + decltype(); + setState(559); + match(REF); + setState(560); + match(ID); + } + break; + case 2: + _localctx = new ConstructorfuncrefContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(562); + decltype(); + setState(563); + match(REF); + setState(564); + match(NEW); + } + break; + case 3: + _localctx = new LocalfuncrefContext(_localctx); + enterOuterAlt(_localctx, 3); { + setState(566); + match(THIS); + setState(567); + match(REF); + setState(568); + match(ID); + } + break; + } + } catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } finally { + exitRule(); + } + return _localctx; + } + + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 4: + return rstatement_sempred((RstatementContext) _localctx, predIndex); + case 16: + return noncondexpression_sempred((NoncondexpressionContext) _localctx, predIndex); + } + return true; + } + + private boolean rstatement_sempred(RstatementContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return _input.LA(1) != ELSE; + } + return true; + } + + private boolean noncondexpression_sempred(NoncondexpressionContext _localctx, int predIndex) { + switch (predIndex) { + case 1: + return 
precpred(_ctx, 13); + case 2: + return precpred(_ctx, 12); + case 3: + return precpred(_ctx, 11); + case 4: + return precpred(_ctx, 10); + case 5: + return precpred(_ctx, 9); + case 6: + return precpred(_ctx, 7); + case 7: + return precpred(_ctx, 6); + case 8: + return precpred(_ctx, 5); + case 9: + return precpred(_ctx, 4); + case 10: + return precpred(_ctx, 3); + case 11: + return precpred(_ctx, 2); + case 12: + return precpred(_ctx, 1); + case 13: + return precpred(_ctx, 8); + } + return true; + } + + public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3W\u023e\4\2\t\2\4" + + "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t" + + "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22" + + "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31" + + "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!" + + "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\3\2\7\2R\n\2\f\2\16" + + "\2U\13\2\3\2\7\2X\n\2\f\2\16\2[\13\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\4\3" + + "\4\3\4\3\4\3\4\3\4\3\4\7\4k\n\4\f\4\16\4n\13\4\5\4p\n\4\3\4\3\4\3\5\3" + + "\5\3\5\3\5\5\5x\n\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u0082\n\6\3\6" + + "\3\6\3\6\3\6\3\6\3\6\5\6\u008a\n\6\3\6\3\6\3\6\5\6\u008f\n\6\3\6\3\6\5" + + "\6\u0093\n\6\3\6\3\6\5\6\u0097\n\6\3\6\3\6\3\6\5\6\u009c\n\6\3\6\3\6\3" + + "\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6" + + "\6\6\u00b2\n\6\r\6\16\6\u00b3\5\6\u00b6\n\6\3\7\3\7\3\7\3\7\3\7\3\7\3" + + "\7\3\7\3\7\3\7\3\7\3\7\5\7\u00c4\n\7\3\7\3\7\3\7\5\7\u00c9\n\7\3\b\3\b" + + "\5\b\u00cd\n\b\3\t\3\t\7\t\u00d1\n\t\f\t\16\t\u00d4\13\t\3\t\5\t\u00d7" + + "\n\t\3\t\3\t\3\n\3\n\3\13\3\13\5\13\u00df\n\13\3\f\3\f\3\r\3\r\3\r\3\r" + + "\7\r\u00e7\n\r\f\r\16\r\u00ea\13\r\3\16\3\16\3\16\7\16\u00ef\n\16\f\16" + + "\16\16\u00f2\13\16\3\17\3\17\3\17\3\17\3\17\7\17\u00f9\n\17\f\17\16\17" + + "\u00fc\13\17\5\17\u00fe\n\17\3\20\3\20\3\20\5\20\u0103\n\20\3\21\3\21" + + "\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22" + + "\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22" + + "\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22" + + "\3\22\3\22\3\22\3\22\3\22\7\22\u0136\n\22\f\22\16\22\u0139\13\22\3\23" + + "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u0146\n\23\3\24" + + "\3\24\3\24\3\24\3\24\5\24\u014d\n\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25" + + "\5\25\u0156\n\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\5\26" + + "\u0162\n\26\3\27\3\27\3\30\3\30\3\30\6\30\u0169\n\30\r\30\16\30\u016a" + + "\3\30\3\30\3\30\6\30\u0170\n\30\r\30\16\30\u0171\3\30\3\30\3\30\7\30\u0177" + + "\n\30\f\30\16\30\u017a\13\30\3\30\3\30\7\30\u017e\n\30\f\30\16\30\u0181" + + "\13\30\5\30\u0183\n\30\3\31\3\31\7\31\u0187\n\31\f\31\16\31\u018a\13\31" + + "\3\31\5\31\u018d\n\31\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32" + + "\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u01a2\n\32\3\33\3\33" + + "\3\33\5\33\u01a7\n\33\3\34\3\34\5\34\u01ab\n\34\3\35\3\35\3\35\3\35\3" + + "\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \6 \u01be\n \r \16" + + " \u01bf\3 \3 \7 \u01c4\n \f \16 \u01c7\13 \5 \u01c9\n \3 \3 \3 \3 \3 " + + "\3 \3 \3 \7 \u01d3\n \f \16 \u01d6\13 \5 \u01d8\n \3 \3 \7 \u01dc\n \f" + + " \16 \u01df\13 \5 \u01e1\n \3!\3!\3!\3!\7!\u01e7\n!\f!\16!\u01ea\13!\3" + + "!\3!\3!\3!\5!\u01f0\n!\3\"\3\"\3\"\3\"\7\"\u01f6\n\"\f\"\16\"\u01f9\13" + + "\"\3\"\3\"\3\"\3\"\3\"\5\"\u0200\n\"\3#\3#\3#\3#\3$\3$\3$\3$\7$\u020a" + + 
"\n$\f$\16$\u020d\13$\5$\u020f\n$\3$\3$\3%\3%\3%\5%\u0216\n%\3&\3&\3&\3" + + "&\3&\7&\u021d\n&\f&\16&\u0220\13&\5&\u0222\n&\3&\5&\u0225\n&\3&\3&\3&" + + "\5&\u022a\n&\3\'\5\'\u022d\n\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3" + + "(\5(\u023c\n(\3(\2\3\")\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*" + + ",.\60\62\64\668:<>@BDFHJLN\2\20\3\3\16\16\3\2 \"\3\2#$\3\2:;\3\2%\'\3" + + "\2(+\3\2,/\3\2>I\3\2<=\3\2\36\37\3\2ST\3\2JM\3\2\13\f\3\2VW\u0279\2S\3" + + "\2\2\2\4^\3\2\2\2\6c\3\2\2\2\bw\3\2\2\2\n\u00b5\3\2\2\2\f\u00c8\3\2\2" + + "\2\16\u00cc\3\2\2\2\20\u00ce\3\2\2\2\22\u00da\3\2\2\2\24\u00de\3\2\2\2" + + "\26\u00e0\3\2\2\2\30\u00e2\3\2\2\2\32\u00eb\3\2\2\2\34\u00fd\3\2\2\2\36" + + "\u00ff\3\2\2\2 \u0104\3\2\2\2\"\u010b\3\2\2\2$\u0145\3\2\2\2&\u014c\3" + + "\2\2\2(\u0155\3\2\2\2*\u0161\3\2\2\2,\u0163\3\2\2\2.\u0182\3\2\2\2\60" + + "\u018c\3\2\2\2\62\u01a1\3\2\2\2\64\u01a6\3\2\2\2\66\u01aa\3\2\2\28\u01ac" + + "\3\2\2\2:\u01b0\3\2\2\2<\u01b3\3\2\2\2>\u01e0\3\2\2\2@\u01ef\3\2\2\2B" + + "\u01ff\3\2\2\2D\u0201\3\2\2\2F\u0205\3\2\2\2H\u0215\3\2\2\2J\u0224\3\2" + + "\2\2L\u022c\3\2\2\2N\u023b\3\2\2\2PR\5\4\3\2QP\3\2\2\2RU\3\2\2\2SQ\3\2" + + "\2\2ST\3\2\2\2TY\3\2\2\2US\3\2\2\2VX\5\b\5\2WV\3\2\2\2X[\3\2\2\2YW\3\2" + + "\2\2YZ\3\2\2\2Z\\\3\2\2\2[Y\3\2\2\2\\]\7\2\2\3]\3\3\2\2\2^_\5\32\16\2" + + "_`\7U\2\2`a\5\6\4\2ab\5\20\t\2b\5\3\2\2\2co\7\t\2\2de\5\32\16\2el\7U\2" + + "\2fg\7\r\2\2gh\5\32\16\2hi\7U\2\2ik\3\2\2\2jf\3\2\2\2kn\3\2\2\2lj\3\2" + + "\2\2lm\3\2\2\2mp\3\2\2\2nl\3\2\2\2od\3\2\2\2op\3\2\2\2pq\3\2\2\2qr\7\n" + + "\2\2r\7\3\2\2\2sx\5\n\6\2tu\5\f\7\2uv\t\2\2\2vx\3\2\2\2ws\3\2\2\2wt\3" + + "\2\2\2x\t\3\2\2\2yz\7\17\2\2z{\7\t\2\2{|\5$\23\2|}\7\n\2\2}\u0081\5\16" + + "\b\2~\177\7\21\2\2\177\u0082\5\16\b\2\u0080\u0082\6\6\2\2\u0081~\3\2\2" + + "\2\u0081\u0080\3\2\2\2\u0082\u00b6\3\2\2\2\u0083\u0084\7\22\2\2\u0084" + + "\u0085\7\t\2\2\u0085\u0086\5$\23\2\u0086\u0089\7\n\2\2\u0087\u008a\5\16" + + "\b\2\u0088\u008a\5\22\n\2\u0089\u0087\3\2\2\2\u0089\u0088\3\2\2\2\u008a" + + "\u00b6\3\2\2\2\u008b\u008c\7\24\2\2\u008c\u008e\7\t\2\2\u008d\u008f\5" + + "\24\13\2\u008e\u008d\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090" + + "\u0092\7\16\2\2\u0091\u0093\5$\23\2\u0092\u0091\3\2\2\2\u0092\u0093\3" + + "\2\2\2\u0093\u0094\3\2\2\2\u0094\u0096\7\16\2\2\u0095\u0097\5\26\f\2\u0096" + + "\u0095\3\2\2\2\u0096\u0097\3\2\2\2\u0097\u0098\3\2\2\2\u0098\u009b\7\n" + + "\2\2\u0099\u009c\5\16\b\2\u009a\u009c\5\22\n\2\u009b\u0099\3\2\2\2\u009b" + + "\u009a\3\2\2\2\u009c\u00b6\3\2\2\2\u009d\u009e\7\24\2\2\u009e\u009f\7" + + "\t\2\2\u009f\u00a0\5\32\16\2\u00a0\u00a1\7U\2\2\u00a1\u00a2\7\66\2\2\u00a2" + + "\u00a3\5$\23\2\u00a3\u00a4\7\n\2\2\u00a4\u00a5\5\16\b\2\u00a5\u00b6\3" + + "\2\2\2\u00a6\u00a7\7\24\2\2\u00a7\u00a8\7\t\2\2\u00a8\u00a9\7U\2\2\u00a9" + + "\u00aa\7\20\2\2\u00aa\u00ab\5$\23\2\u00ab\u00ac\7\n\2\2\u00ac\u00ad\5" + + "\16\b\2\u00ad\u00b6\3\2\2\2\u00ae\u00af\7\31\2\2\u00af\u00b1\5\20\t\2" + + "\u00b0\u00b2\5 \21\2\u00b1\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00b1" + + "\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b6\3\2\2\2\u00b5y\3\2\2\2\u00b5" + + "\u0083\3\2\2\2\u00b5\u008b\3\2\2\2\u00b5\u009d\3\2\2\2\u00b5\u00a6\3\2" + + "\2\2\u00b5\u00ae\3\2\2\2\u00b6\13\3\2\2\2\u00b7\u00b8\7\23\2\2\u00b8\u00b9" + + "\5\20\t\2\u00b9\u00ba\7\22\2\2\u00ba\u00bb\7\t\2\2\u00bb\u00bc\5$\23\2" + + "\u00bc\u00bd\7\n\2\2\u00bd\u00c9\3\2\2\2\u00be\u00c9\5\30\r\2\u00bf\u00c9" + + "\7\25\2\2\u00c0\u00c9\7\26\2\2\u00c1\u00c3\7\27\2\2\u00c2\u00c4\5$\23" + + "\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c9\3\2\2\2\u00c5\u00c6" + 
+ "\7\33\2\2\u00c6\u00c9\5$\23\2\u00c7\u00c9\5$\23\2\u00c8\u00b7\3\2\2\2" + + "\u00c8\u00be\3\2\2\2\u00c8\u00bf\3\2\2\2\u00c8\u00c0\3\2\2\2\u00c8\u00c1" + + "\3\2\2\2\u00c8\u00c5\3\2\2\2\u00c8\u00c7\3\2\2\2\u00c9\r\3\2\2\2\u00ca" + + "\u00cd\5\20\t\2\u00cb\u00cd\5\b\5\2\u00cc\u00ca\3\2\2\2\u00cc\u00cb\3" + + "\2\2\2\u00cd\17\3\2\2\2\u00ce\u00d2\7\5\2\2\u00cf\u00d1\5\b\5\2\u00d0" + + "\u00cf\3\2\2\2\u00d1\u00d4\3\2\2\2\u00d2\u00d0\3\2\2\2\u00d2\u00d3\3\2" + + "\2\2\u00d3\u00d6\3\2\2\2\u00d4\u00d2\3\2\2\2\u00d5\u00d7\5\f\7\2\u00d6" + + "\u00d5\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00d9\7\6" + + "\2\2\u00d9\21\3\2\2\2\u00da\u00db\7\16\2\2\u00db\23\3\2\2\2\u00dc\u00df" + + "\5\30\r\2\u00dd\u00df\5$\23\2\u00de\u00dc\3\2\2\2\u00de\u00dd\3\2\2\2" + + "\u00df\25\3\2\2\2\u00e0\u00e1\5$\23\2\u00e1\27\3\2\2\2\u00e2\u00e3\5\32" + + "\16\2\u00e3\u00e8\5\36\20\2\u00e4\u00e5\7\r\2\2\u00e5\u00e7\5\36\20\2" + + "\u00e6\u00e4\3\2\2\2\u00e7\u00ea\3\2\2\2\u00e8\u00e6\3\2\2\2\u00e8\u00e9" + + "\3\2\2\2\u00e9\31\3\2\2\2\u00ea\u00e8\3\2\2\2\u00eb\u00f0\5\34\17\2\u00ec" + + "\u00ed\7\7\2\2\u00ed\u00ef\7\b\2\2\u00ee\u00ec\3\2\2\2\u00ef\u00f2\3\2" + + "\2\2\u00f0\u00ee\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\33\3\2\2\2\u00f2\u00f0" + + "\3\2\2\2\u00f3\u00fe\7T\2\2\u00f4\u00fe\7S\2\2\u00f5\u00fa\7U\2\2\u00f6" + + "\u00f7\7\13\2\2\u00f7\u00f9\7W\2\2\u00f8\u00f6\3\2\2\2\u00f9\u00fc\3\2" + + "\2\2\u00fa\u00f8\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb\u00fe\3\2\2\2\u00fc" + + "\u00fa\3\2\2\2\u00fd\u00f3\3\2\2\2\u00fd\u00f4\3\2\2\2\u00fd\u00f5\3\2" + + "\2\2\u00fe\35\3\2\2\2\u00ff\u0102\7U\2\2\u0100\u0101\7>\2\2\u0101\u0103" + + "\5$\23\2\u0102\u0100\3\2\2\2\u0102\u0103\3\2\2\2\u0103\37\3\2\2\2\u0104" + + "\u0105\7\32\2\2\u0105\u0106\7\t\2\2\u0106\u0107\5\34\17\2\u0107\u0108" + + "\7U\2\2\u0108\u0109\7\n\2\2\u0109\u010a\5\20\t\2\u010a!\3\2\2\2\u010b" + + "\u010c\b\22\1\2\u010c\u010d\5&\24\2\u010d\u0137\3\2\2\2\u010e\u010f\f" + + "\17\2\2\u010f\u0110\t\3\2\2\u0110\u0136\5\"\22\20\u0111\u0112\f\16\2\2" + + "\u0112\u0113\t\4\2\2\u0113\u0136\5\"\22\17\u0114\u0115\f\r\2\2\u0115\u0116" + + "\t\5\2\2\u0116\u0136\5\"\22\16\u0117\u0118\f\f\2\2\u0118\u0119\t\6\2\2" + + "\u0119\u0136\5\"\22\r\u011a\u011b\f\13\2\2\u011b\u011c\t\7\2\2\u011c\u0136" + + "\5\"\22\f\u011d\u011e\f\t\2\2\u011e\u011f\t\b\2\2\u011f\u0136\5\"\22\n" + + "\u0120\u0121\f\b\2\2\u0121\u0122\7\60\2\2\u0122\u0136\5\"\22\t\u0123\u0124" + + "\f\7\2\2\u0124\u0125\7\61\2\2\u0125\u0136\5\"\22\b\u0126\u0127\f\6\2\2" + + "\u0127\u0128\7\62\2\2\u0128\u0136\5\"\22\7\u0129\u012a\f\5\2\2\u012a\u012b" + + "\7\63\2\2\u012b\u0136\5\"\22\6\u012c\u012d\f\4\2\2\u012d\u012e\7\64\2" + + "\2\u012e\u0136\5\"\22\5\u012f\u0130\f\3\2\2\u0130\u0131\7\67\2\2\u0131" + + "\u0136\5\"\22\3\u0132\u0133\f\n\2\2\u0133\u0134\7\35\2\2\u0134\u0136\5" + + "\32\16\2\u0135\u010e\3\2\2\2\u0135\u0111\3\2\2\2\u0135\u0114\3\2\2\2\u0135" + + "\u0117\3\2\2\2\u0135\u011a\3\2\2\2\u0135\u011d\3\2\2\2\u0135\u0120\3\2" + + "\2\2\u0135\u0123\3\2\2\2\u0135\u0126\3\2\2\2\u0135\u0129\3\2\2\2\u0135" + + "\u012c\3\2\2\2\u0135\u012f\3\2\2\2\u0135\u0132\3\2\2\2\u0136\u0139\3\2" + + "\2\2\u0137\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138#\3\2\2\2\u0139\u0137" + + "\3\2\2\2\u013a\u0146\5\"\22\2\u013b\u013c\5\"\22\2\u013c\u013d\7\65\2" + + "\2\u013d\u013e\5$\23\2\u013e\u013f\7\66\2\2\u013f\u0140\5$\23\2\u0140" + + "\u0146\3\2\2\2\u0141\u0142\5\"\22\2\u0142\u0143\t\t\2\2\u0143\u0144\5" + + "$\23\2\u0144\u0146\3\2\2\2\u0145\u013a\3\2\2\2\u0145\u013b\3\2\2\2\u0145" + + 
"\u0141\3\2\2\2\u0146%\3\2\2\2\u0147\u0148\t\n\2\2\u0148\u014d\5\60\31" + + "\2\u0149\u014a\t\4\2\2\u014a\u014d\5&\24\2\u014b\u014d\5(\25\2\u014c\u0147" + + "\3\2\2\2\u014c\u0149\3\2\2\2\u014c\u014b\3\2\2\2\u014d\'\3\2\2\2\u014e" + + "\u0156\5\60\31\2\u014f\u0150\5\60\31\2\u0150\u0151\t\n\2\2\u0151\u0156" + + "\3\2\2\2\u0152\u0153\t\13\2\2\u0153\u0156\5&\24\2\u0154\u0156\5*\26\2" + + "\u0155\u014e\3\2\2\2\u0155\u014f\3\2\2\2\u0155\u0152\3\2\2\2\u0155\u0154" + + "\3\2\2\2\u0156)\3\2\2\2\u0157\u0158\7\t\2\2\u0158\u0159\5,\27\2\u0159" + + "\u015a\7\n\2\2\u015a\u015b\5&\24\2\u015b\u0162\3\2\2\2\u015c\u015d\7\t" + + "\2\2\u015d\u015e\5.\30\2\u015e\u015f\7\n\2\2\u015f\u0160\5(\25\2\u0160" + + "\u0162\3\2\2\2\u0161\u0157\3\2\2\2\u0161\u015c\3\2\2\2\u0162+\3\2\2\2" + + "\u0163\u0164\t\f\2\2\u0164-\3\2\2\2\u0165\u0168\7T\2\2\u0166\u0167\7\7" + + "\2\2\u0167\u0169\7\b\2\2\u0168\u0166\3\2\2\2\u0169\u016a\3\2\2\2\u016a" + + "\u0168\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u0183\3\2\2\2\u016c\u016f\7S" + + "\2\2\u016d\u016e\7\7\2\2\u016e\u0170\7\b\2\2\u016f\u016d\3\2\2\2\u0170" + + "\u0171\3\2\2\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0183\3\2" + + "\2\2\u0173\u0178\7U\2\2\u0174\u0175\7\13\2\2\u0175\u0177\7W\2\2\u0176" + + "\u0174\3\2\2\2\u0177\u017a\3\2\2\2\u0178\u0176\3\2\2\2\u0178\u0179\3\2" + + "\2\2\u0179\u017f\3\2\2\2\u017a\u0178\3\2\2\2\u017b\u017c\7\7\2\2\u017c" + + "\u017e\7\b\2\2\u017d\u017b\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2" + + "\2\2\u017f\u0180\3\2\2\2\u0180\u0183\3\2\2\2\u0181\u017f\3\2\2\2\u0182" + + "\u0165\3\2\2\2\u0182\u016c\3\2\2\2\u0182\u0173\3\2\2\2\u0183/\3\2\2\2" + + "\u0184\u0188\5\62\32\2\u0185\u0187\5\64\33\2\u0186\u0185\3\2\2\2\u0187" + + "\u018a\3\2\2\2\u0188\u0186\3\2\2\2\u0188\u0189\3\2\2\2\u0189\u018d\3\2" + + "\2\2\u018a\u0188\3\2\2\2\u018b\u018d\5> \2\u018c\u0184\3\2\2\2\u018c\u018b" + + "\3\2\2\2\u018d\61\3\2\2\2\u018e\u018f\7\t\2\2\u018f\u0190\5$\23\2\u0190" + + "\u0191\7\n\2\2\u0191\u01a2\3\2\2\2\u0192\u01a2\t\r\2\2\u0193\u01a2\7P" + + "\2\2\u0194\u01a2\7Q\2\2\u0195\u01a2\7R\2\2\u0196\u01a2\7N\2\2\u0197\u01a2" + + "\7O\2\2\u0198\u01a2\5@!\2\u0199\u01a2\5B\"\2\u019a\u01a2\7U\2\2\u019b" + + "\u019c\7U\2\2\u019c\u01a2\5F$\2\u019d\u019e\7\30\2\2\u019e\u019f\5\34" + + "\17\2\u019f\u01a0\5F$\2\u01a0\u01a2\3\2\2\2\u01a1\u018e\3\2\2\2\u01a1" + + "\u0192\3\2\2\2\u01a1\u0193\3\2\2\2\u01a1\u0194\3\2\2\2\u01a1\u0195\3\2" + + "\2\2\u01a1\u0196\3\2\2\2\u01a1\u0197\3\2\2\2\u01a1\u0198\3\2\2\2\u01a1" + + "\u0199\3\2\2\2\u01a1\u019a\3\2\2\2\u01a1\u019b\3\2\2\2\u01a1\u019d\3\2" + + "\2\2\u01a2\63\3\2\2\2\u01a3\u01a7\58\35\2\u01a4\u01a7\5:\36\2\u01a5\u01a7" + + "\5<\37\2\u01a6\u01a3\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6\u01a5\3\2\2\2\u01a7" + + "\65\3\2\2\2\u01a8\u01ab\58\35\2\u01a9\u01ab\5:\36\2\u01aa\u01a8\3\2\2" + + "\2\u01aa\u01a9\3\2\2\2\u01ab\67\3\2\2\2\u01ac\u01ad\t\16\2\2\u01ad\u01ae" + + "\7W\2\2\u01ae\u01af\5F$\2\u01af9\3\2\2\2\u01b0\u01b1\t\16\2\2\u01b1\u01b2" + + "\t\17\2\2\u01b2;\3\2\2\2\u01b3\u01b4\7\7\2\2\u01b4\u01b5\5$\23\2\u01b5" + + "\u01b6\7\b\2\2\u01b6=\3\2\2\2\u01b7\u01b8\7\30\2\2\u01b8\u01bd\5\34\17" + + "\2\u01b9\u01ba\7\7\2\2\u01ba\u01bb\5$\23\2\u01bb\u01bc\7\b\2\2\u01bc\u01be" + + "\3\2\2\2\u01bd\u01b9\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf\u01bd\3\2\2\2\u01bf" + + "\u01c0\3\2\2\2\u01c0\u01c8\3\2\2\2\u01c1\u01c5\5\66\34\2\u01c2\u01c4\5" + + "\64\33\2\u01c3\u01c2\3\2\2\2\u01c4\u01c7\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c5" + + "\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c8\u01c1\3\2" + + 
"\2\2\u01c8\u01c9\3\2\2\2\u01c9\u01e1\3\2\2\2\u01ca\u01cb\7\30\2\2\u01cb" + + "\u01cc\5\34\17\2\u01cc\u01cd\7\7\2\2\u01cd\u01ce\7\b\2\2\u01ce\u01d7\7" + + "\5\2\2\u01cf\u01d4\5$\23\2\u01d0\u01d1\7\r\2\2\u01d1\u01d3\5$\23\2\u01d2" + + "\u01d0\3\2\2\2\u01d3\u01d6\3\2\2\2\u01d4\u01d2\3\2\2\2\u01d4\u01d5\3\2" + + "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d7\u01cf\3\2\2\2\u01d7" + + "\u01d8\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d9\u01dd\7\6\2\2\u01da\u01dc\5\64" + + "\33\2\u01db\u01da\3\2\2\2\u01dc\u01df\3\2\2\2\u01dd\u01db\3\2\2\2\u01dd" + + "\u01de\3\2\2\2\u01de\u01e1\3\2\2\2\u01df\u01dd\3\2\2\2\u01e0\u01b7\3\2" + + "\2\2\u01e0\u01ca\3\2\2\2\u01e1?\3\2\2\2\u01e2\u01e3\7\7\2\2\u01e3\u01e8" + + "\5$\23\2\u01e4\u01e5\7\r\2\2\u01e5\u01e7\5$\23\2\u01e6\u01e4\3\2\2\2\u01e7" + + "\u01ea\3\2\2\2\u01e8\u01e6\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01eb\3\2" + + "\2\2\u01ea\u01e8\3\2\2\2\u01eb\u01ec\7\b\2\2\u01ec\u01f0\3\2\2\2\u01ed" + + "\u01ee\7\7\2\2\u01ee\u01f0\7\b\2\2\u01ef\u01e2\3\2\2\2\u01ef\u01ed\3\2" + + "\2\2\u01f0A\3\2\2\2\u01f1\u01f2\7\7\2\2\u01f2\u01f7\5D#\2\u01f3\u01f4" + + "\7\r\2\2\u01f4\u01f6\5D#\2\u01f5\u01f3\3\2\2\2\u01f6\u01f9\3\2\2\2\u01f7" + + "\u01f5\3\2\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01fa\3\2\2\2\u01f9\u01f7\3\2" + + "\2\2\u01fa\u01fb\7\b\2\2\u01fb\u0200\3\2\2\2\u01fc\u01fd\7\7\2\2\u01fd" + + "\u01fe\7\66\2\2\u01fe\u0200\7\b\2\2\u01ff\u01f1\3\2\2\2\u01ff\u01fc\3" + + "\2\2\2\u0200C\3\2\2\2\u0201\u0202\5$\23\2\u0202\u0203\7\66\2\2\u0203\u0204" + + "\5$\23\2\u0204E\3\2\2\2\u0205\u020e\7\t\2\2\u0206\u020b\5H%\2\u0207\u0208" + + "\7\r\2\2\u0208\u020a\5H%\2\u0209\u0207\3\2\2\2\u020a\u020d\3\2\2\2\u020b" + + "\u0209\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u020f\3\2\2\2\u020d\u020b\3\2" + + "\2\2\u020e\u0206\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\3\2\2\2\u0210" + + "\u0211\7\n\2\2\u0211G\3\2\2\2\u0212\u0216\5$\23\2\u0213\u0216\5J&\2\u0214" + + "\u0216\5N(\2\u0215\u0212\3\2\2\2\u0215\u0213\3\2\2\2\u0215\u0214\3\2\2" + + "\2\u0216I\3\2\2\2\u0217\u0225\5L\'\2\u0218\u0221\7\t\2\2\u0219\u021e\5" + + "L\'\2\u021a\u021b\7\r\2\2\u021b\u021d\5L\'\2\u021c\u021a\3\2\2\2\u021d" + + "\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0222\3\2" + + "\2\2\u0220\u021e\3\2\2\2\u0221\u0219\3\2\2\2\u0221\u0222\3\2\2\2\u0222" + + "\u0223\3\2\2\2\u0223\u0225\7\n\2\2\u0224\u0217\3\2\2\2\u0224\u0218\3\2" + + "\2\2\u0225\u0226\3\2\2\2\u0226\u0229\79\2\2\u0227\u022a\5\20\t\2\u0228" + + "\u022a\5$\23\2\u0229\u0227\3\2\2\2\u0229\u0228\3\2\2\2\u022aK\3\2\2\2" + + "\u022b\u022d\5\32\16\2\u022c\u022b\3\2\2\2\u022c\u022d\3\2\2\2\u022d\u022e" + + "\3\2\2\2\u022e\u022f\7U\2\2\u022fM\3\2\2\2\u0230\u0231\5\32\16\2\u0231" + + "\u0232\78\2\2\u0232\u0233\7U\2\2\u0233\u023c\3\2\2\2\u0234\u0235\5\32" + + "\16\2\u0235\u0236\78\2\2\u0236\u0237\7\30\2\2\u0237\u023c\3\2\2\2\u0238" + + "\u0239\7\34\2\2\u0239\u023a\78\2\2\u023a\u023c\7U\2\2\u023b\u0230\3\2" + + "\2\2\u023b\u0234\3\2\2\2\u023b\u0238\3\2\2\2\u023cO\3\2\2\2>SYlow\u0081" + + "\u0089\u008e\u0092\u0096\u009b\u00b3\u00b5\u00c3\u00c8\u00cc\u00d2\u00d6" + + "\u00de\u00e8\u00f0\u00fa\u00fd\u0102\u0135\u0137\u0145\u014c\u0155\u0161" + + "\u016a\u0171\u0178\u017f\u0182\u0188\u018c\u01a1\u01a6\u01aa\u01bf\u01c5" + + "\u01c8\u01d4\u01d7\u01dd\u01e0\u01e8\u01ef\u01f7\u01ff\u020b\u020e\u0215" + + "\u021e\u0221\u0224\u0229\u022c\u023b"; + public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = 
new DFA(_ATN.getDecisionState(i), i); + } } - } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserBaseVisitor.java index bfd63b7ebfb..f347d90f2cc 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserBaseVisitor.java @@ -32,6 +32,7 @@ */ package org.opensearch.painless.antlr; + import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; /** @@ -43,550 +44,861 @@ import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; * operations with no return type. */ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implements PainlessParserVisitor { - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitSource(PainlessParser.SourceContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitFunction(PainlessParser.FunctionContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitParameters(PainlessParser.ParametersContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitStatement(PainlessParser.StatementContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitIf(PainlessParser.IfContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitWhile(PainlessParser.WhileContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitFor(PainlessParser.ForContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitEach(PainlessParser.EachContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitIneach(PainlessParser.IneachContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitTry(PainlessParser.TryContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDo(PainlessParser.DoContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDecl(PainlessParser.DeclContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitContinue(PainlessParser.ContinueContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitBreak(PainlessParser.BreakContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitReturn(PainlessParser.ReturnContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitThrow(PainlessParser.ThrowContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitExpr(PainlessParser.ExprContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitTrailer(PainlessParser.TrailerContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitBlock(PainlessParser.BlockContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitEmpty(PainlessParser.EmptyContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitInitializer(PainlessParser.InitializerContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitAfterthought(PainlessParser.AfterthoughtContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDeclaration(PainlessParser.DeclarationContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDecltype(PainlessParser.DecltypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitType(PainlessParser.TypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDeclvar(PainlessParser.DeclvarContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitTrap(PainlessParser.TrapContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitSingle(PainlessParser.SingleContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitComp(PainlessParser.CompContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitBool(PainlessParser.BoolContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitElvis(PainlessParser.ElvisContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitInstanceof(PainlessParser.InstanceofContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNonconditional(PainlessParser.NonconditionalContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitConditional(PainlessParser.ConditionalContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitAssignment(PainlessParser.AssignmentContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPre(PainlessParser.PreContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitAddsub(PainlessParser.AddsubContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNotaddsub(PainlessParser.NotaddsubContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPost(PainlessParser.PostContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNot(PainlessParser.NotContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitCast(PainlessParser.CastContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPrimordefcast(PainlessParser.PrimordefcastContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitRefcast(PainlessParser.RefcastContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPrimordefcasttype(PainlessParser.PrimordefcasttypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitRefcasttype(PainlessParser.RefcasttypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitDynamic(PainlessParser.DynamicContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNewarray(PainlessParser.NewarrayContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitString(PainlessParser.StringContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitRegex(PainlessParser.RegexContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitVariable(PainlessParser.VariableContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitCalllocal(PainlessParser.CalllocalContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNewobject(PainlessParser.NewobjectContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPostfix(PainlessParser.PostfixContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPostdot(PainlessParser.PostdotContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitCallinvoke(PainlessParser.CallinvokeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitFieldaccess(PainlessParser.FieldaccessContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitBraceaccess(PainlessParser.BraceaccessContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitListinitializer(PainlessParser.ListinitializerContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitMapinitializer(PainlessParser.MapinitializerContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitMaptoken(PainlessParser.MaptokenContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitArgument(PainlessParser.ArgumentContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitLambda(PainlessParser.LambdaContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitLamtype(PainlessParser.LamtypeContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitSource(PainlessParser.SourceContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitFunction(PainlessParser.FunctionContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitParameters(PainlessParser.ParametersContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitStatement(PainlessParser.StatementContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitIf(PainlessParser.IfContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitWhile(PainlessParser.WhileContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitFor(PainlessParser.ForContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitEach(PainlessParser.EachContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitIneach(PainlessParser.IneachContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitTry(PainlessParser.TryContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitDo(PainlessParser.DoContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitDecl(PainlessParser.DeclContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitContinue(PainlessParser.ContinueContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitBreak(PainlessParser.BreakContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitReturn(PainlessParser.ReturnContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitThrow(PainlessParser.ThrowContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitExpr(PainlessParser.ExprContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitTrailer(PainlessParser.TrailerContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitBlock(PainlessParser.BlockContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitEmpty(PainlessParser.EmptyContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitInitializer(PainlessParser.InitializerContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitAfterthought(PainlessParser.AfterthoughtContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitDeclaration(PainlessParser.DeclarationContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitDecltype(PainlessParser.DecltypeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitType(PainlessParser.TypeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitDeclvar(PainlessParser.DeclvarContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitTrap(PainlessParser.TrapContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitSingle(PainlessParser.SingleContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitComp(PainlessParser.CompContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitBool(PainlessParser.BoolContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitBinary(PainlessParser.BinaryContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitElvis(PainlessParser.ElvisContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitInstanceof(PainlessParser.InstanceofContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitNonconditional(PainlessParser.NonconditionalContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitConditional(PainlessParser.ConditionalContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitAssignment(PainlessParser.AssignmentContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override + public T visitPre(PainlessParser.PreContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitAddsub(PainlessParser.AddsubContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNotaddsub(PainlessParser.NotaddsubContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitRead(PainlessParser.ReadContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPost(PainlessParser.PostContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNot(PainlessParser.NotContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitCast(PainlessParser.CastContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPrimordefcast(PainlessParser.PrimordefcastContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitRefcast(PainlessParser.RefcastContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPrimordefcasttype(PainlessParser.PrimordefcasttypeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitRefcasttype(PainlessParser.RefcasttypeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitDynamic(PainlessParser.DynamicContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNewarray(PainlessParser.NewarrayContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNumeric(PainlessParser.NumericContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitTrue(PainlessParser.TrueContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitFalse(PainlessParser.FalseContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNull(PainlessParser.NullContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitString(PainlessParser.StringContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitRegex(PainlessParser.RegexContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitListinit(PainlessParser.ListinitContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitMapinit(PainlessParser.MapinitContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitVariable(PainlessParser.VariableContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitCalllocal(PainlessParser.CalllocalContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNewobject(PainlessParser.NewobjectContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPostfix(PainlessParser.PostfixContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitPostdot(PainlessParser.PostdotContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitCallinvoke(PainlessParser.CallinvokeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitFieldaccess(PainlessParser.FieldaccessContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitBraceaccess(PainlessParser.BraceaccessContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitListinitializer(PainlessParser.ListinitializerContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitMapinitializer(PainlessParser.MapinitializerContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitMaptoken(PainlessParser.MaptokenContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitArguments(PainlessParser.ArgumentsContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitArgument(PainlessParser.ArgumentContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitLambda(PainlessParser.LambdaContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitLamtype(PainlessParser.LamtypeContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx) { + return visitChildren(ctx); + } + + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override + public T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx) { + return visitChildren(ctx); + } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserVisitor.java index 6a0f1653116..d63ef17ac0b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParserVisitor.java @@ -32,6 +32,7 @@ */ package org.opensearch.painless.antlr; + import org.antlr.v4.runtime.tree.ParseTreeVisitor; /** @@ -42,522 +43,599 @@ import org.antlr.v4.runtime.tree.ParseTreeVisitor; * operations with no return type. */ interface PainlessParserVisitor extends ParseTreeVisitor { - /** - * Visit a parse tree produced by {@link PainlessParser#source}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitSource(PainlessParser.SourceContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#function}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFunction(PainlessParser.FunctionContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#parameters}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitParameters(PainlessParser.ParametersContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitStatement(PainlessParser.StatementContext ctx); - /** - * Visit a parse tree produced by the {@code if} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitIf(PainlessParser.IfContext ctx); - /** - * Visit a parse tree produced by the {@code while} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitWhile(PainlessParser.WhileContext ctx); - /** - * Visit a parse tree produced by the {@code for} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFor(PainlessParser.ForContext ctx); - /** - * Visit a parse tree produced by the {@code each} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitEach(PainlessParser.EachContext ctx); - /** - * Visit a parse tree produced by the {@code ineach} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitIneach(PainlessParser.IneachContext ctx); - /** - * Visit a parse tree produced by the {@code try} - * labeled alternative in {@link PainlessParser#rstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTry(PainlessParser.TryContext ctx); - /** - * Visit a parse tree produced by the {@code do} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDo(PainlessParser.DoContext ctx); - /** - * Visit a parse tree produced by the {@code decl} - * labeled alternative in {@link PainlessParser#dstatement}. 
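Every override in the generated base visitor above simply forwards to visitChildren, so a concrete visitor (such as Walker later in this diff) only overrides the grammar rules it cares about. A minimal sketch of that pattern; the NumericLiteralCounter name and its counting logic are hypothetical, and it assumes the class sits in the same package as the generated, package-private PainlessParser types:

    // Hypothetical visitor: counts numeric literals and inherits the
    // default visitChildren(...) behaviour for every other rule.
    class NumericLiteralCounter extends PainlessParserBaseVisitor<Void> {
        int count = 0;

        @Override
        public Void visitNumeric(PainlessParser.NumericContext ctx) {
            count++;                        // record each numeric literal
            return super.visitNumeric(ctx); // default: delegate to visitChildren(ctx)
        }
    }

All other rules fall through to the inherited defaults shown in the hunk above.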
- * @param ctx the parse tree - * @return the visitor result - */ - T visitDecl(PainlessParser.DeclContext ctx); - /** - * Visit a parse tree produced by the {@code continue} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitContinue(PainlessParser.ContinueContext ctx); - /** - * Visit a parse tree produced by the {@code break} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBreak(PainlessParser.BreakContext ctx); - /** - * Visit a parse tree produced by the {@code return} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitReturn(PainlessParser.ReturnContext ctx); - /** - * Visit a parse tree produced by the {@code throw} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitThrow(PainlessParser.ThrowContext ctx); - /** - * Visit a parse tree produced by the {@code expr} - * labeled alternative in {@link PainlessParser#dstatement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExpr(PainlessParser.ExprContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#trailer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrailer(PainlessParser.TrailerContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#block}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBlock(PainlessParser.BlockContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#empty}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitEmpty(PainlessParser.EmptyContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#initializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitInitializer(PainlessParser.InitializerContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#afterthought}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitAfterthought(PainlessParser.AfterthoughtContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#declaration}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDeclaration(PainlessParser.DeclarationContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#decltype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDecltype(PainlessParser.DecltypeContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#type}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitType(PainlessParser.TypeContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#declvar}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDeclvar(PainlessParser.DeclvarContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#trap}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrap(PainlessParser.TrapContext ctx); - /** - * Visit a parse tree produced by the {@code single} - * labeled alternative in {@link PainlessParser#noncondexpression}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitSingle(PainlessParser.SingleContext ctx); - /** - * Visit a parse tree produced by the {@code comp} - * labeled alternative in {@link PainlessParser#noncondexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitComp(PainlessParser.CompContext ctx); - /** - * Visit a parse tree produced by the {@code bool} - * labeled alternative in {@link PainlessParser#noncondexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBool(PainlessParser.BoolContext ctx); - /** - * Visit a parse tree produced by the {@code binary} - * labeled alternative in {@link PainlessParser#noncondexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBinary(PainlessParser.BinaryContext ctx); - /** - * Visit a parse tree produced by the {@code elvis} - * labeled alternative in {@link PainlessParser#noncondexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitElvis(PainlessParser.ElvisContext ctx); - /** - * Visit a parse tree produced by the {@code instanceof} - * labeled alternative in {@link PainlessParser#noncondexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitInstanceof(PainlessParser.InstanceofContext ctx); - /** - * Visit a parse tree produced by the {@code nonconditional} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNonconditional(PainlessParser.NonconditionalContext ctx); - /** - * Visit a parse tree produced by the {@code conditional} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitConditional(PainlessParser.ConditionalContext ctx); - /** - * Visit a parse tree produced by the {@code assignment} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitAssignment(PainlessParser.AssignmentContext ctx); - /** - * Visit a parse tree produced by the {@code pre} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPre(PainlessParser.PreContext ctx); - /** - * Visit a parse tree produced by the {@code addsub} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitAddsub(PainlessParser.AddsubContext ctx); - /** - * Visit a parse tree produced by the {@code notaddsub} - * labeled alternative in {@link PainlessParser#unary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNotaddsub(PainlessParser.NotaddsubContext ctx); - /** - * Visit a parse tree produced by the {@code read} - * labeled alternative in {@link PainlessParser#unarynotaddsub}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitRead(PainlessParser.ReadContext ctx); - /** - * Visit a parse tree produced by the {@code post} - * labeled alternative in {@link PainlessParser#unarynotaddsub}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPost(PainlessParser.PostContext ctx); - /** - * Visit a parse tree produced by the {@code not} - * labeled alternative in {@link PainlessParser#unarynotaddsub}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitNot(PainlessParser.NotContext ctx); - /** - * Visit a parse tree produced by the {@code cast} - * labeled alternative in {@link PainlessParser#unarynotaddsub}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCast(PainlessParser.CastContext ctx); - /** - * Visit a parse tree produced by the {@code primordefcast} - * labeled alternative in {@link PainlessParser#castexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPrimordefcast(PainlessParser.PrimordefcastContext ctx); - /** - * Visit a parse tree produced by the {@code refcast} - * labeled alternative in {@link PainlessParser#castexpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitRefcast(PainlessParser.RefcastContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#primordefcasttype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPrimordefcasttype(PainlessParser.PrimordefcasttypeContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#refcasttype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitRefcasttype(PainlessParser.RefcasttypeContext ctx); - /** - * Visit a parse tree produced by the {@code dynamic} - * labeled alternative in {@link PainlessParser#chain}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDynamic(PainlessParser.DynamicContext ctx); - /** - * Visit a parse tree produced by the {@code newarray} - * labeled alternative in {@link PainlessParser#chain}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNewarray(PainlessParser.NewarrayContext ctx); - /** - * Visit a parse tree produced by the {@code precedence} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPrecedence(PainlessParser.PrecedenceContext ctx); - /** - * Visit a parse tree produced by the {@code numeric} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNumeric(PainlessParser.NumericContext ctx); - /** - * Visit a parse tree produced by the {@code true} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrue(PainlessParser.TrueContext ctx); - /** - * Visit a parse tree produced by the {@code false} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFalse(PainlessParser.FalseContext ctx); - /** - * Visit a parse tree produced by the {@code null} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNull(PainlessParser.NullContext ctx); - /** - * Visit a parse tree produced by the {@code string} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitString(PainlessParser.StringContext ctx); - /** - * Visit a parse tree produced by the {@code regex} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitRegex(PainlessParser.RegexContext ctx); - /** - * Visit a parse tree produced by the {@code listinit} - * labeled alternative in {@link PainlessParser#primary}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitListinit(PainlessParser.ListinitContext ctx); - /** - * Visit a parse tree produced by the {@code mapinit} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMapinit(PainlessParser.MapinitContext ctx); - /** - * Visit a parse tree produced by the {@code variable} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitVariable(PainlessParser.VariableContext ctx); - /** - * Visit a parse tree produced by the {@code calllocal} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCalllocal(PainlessParser.CalllocalContext ctx); - /** - * Visit a parse tree produced by the {@code newobject} - * labeled alternative in {@link PainlessParser#primary}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNewobject(PainlessParser.NewobjectContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#postfix}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPostfix(PainlessParser.PostfixContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#postdot}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPostdot(PainlessParser.PostdotContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#callinvoke}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCallinvoke(PainlessParser.CallinvokeContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#fieldaccess}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFieldaccess(PainlessParser.FieldaccessContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#braceaccess}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBraceaccess(PainlessParser.BraceaccessContext ctx); - /** - * Visit a parse tree produced by the {@code newstandardarray} - * labeled alternative in {@link PainlessParser#arrayinitializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx); - /** - * Visit a parse tree produced by the {@code newinitializedarray} - * labeled alternative in {@link PainlessParser#arrayinitializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#listinitializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitListinitializer(PainlessParser.ListinitializerContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#mapinitializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMapinitializer(PainlessParser.MapinitializerContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#maptoken}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMaptoken(PainlessParser.MaptokenContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#arguments}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitArguments(PainlessParser.ArgumentsContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#argument}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitArgument(PainlessParser.ArgumentContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#lambda}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitLambda(PainlessParser.LambdaContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#lamtype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitLamtype(PainlessParser.LamtypeContext ctx); - /** - * Visit a parse tree produced by the {@code classfuncref} - * labeled alternative in {@link PainlessParser#funcref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx); - /** - * Visit a parse tree produced by the {@code constructorfuncref} - * labeled alternative in {@link PainlessParser#funcref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx); - /** - * Visit a parse tree produced by the {@code localfuncref} - * labeled alternative in {@link PainlessParser#funcref}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#source}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSource(PainlessParser.SourceContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#function}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunction(PainlessParser.FunctionContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#parameters}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitParameters(PainlessParser.ParametersContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitStatement(PainlessParser.StatementContext ctx); + + /** + * Visit a parse tree produced by the {@code if} + * labeled alternative in {@link PainlessParser#rstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIf(PainlessParser.IfContext ctx); + + /** + * Visit a parse tree produced by the {@code while} + * labeled alternative in {@link PainlessParser#rstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitWhile(PainlessParser.WhileContext ctx); + + /** + * Visit a parse tree produced by the {@code for} + * labeled alternative in {@link PainlessParser#rstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFor(PainlessParser.ForContext ctx); + + /** + * Visit a parse tree produced by the {@code each} + * labeled alternative in {@link PainlessParser#rstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEach(PainlessParser.EachContext ctx); + + /** + * Visit a parse tree produced by the {@code ineach} + * labeled alternative in {@link PainlessParser#rstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIneach(PainlessParser.IneachContext ctx); + + /** + * Visit a parse tree produced by the {@code try} + * labeled alternative in {@link PainlessParser#rstatement}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitTry(PainlessParser.TryContext ctx); + + /** + * Visit a parse tree produced by the {@code do} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDo(PainlessParser.DoContext ctx); + + /** + * Visit a parse tree produced by the {@code decl} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecl(PainlessParser.DeclContext ctx); + + /** + * Visit a parse tree produced by the {@code continue} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitContinue(PainlessParser.ContinueContext ctx); + + /** + * Visit a parse tree produced by the {@code break} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBreak(PainlessParser.BreakContext ctx); + + /** + * Visit a parse tree produced by the {@code return} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitReturn(PainlessParser.ReturnContext ctx); + + /** + * Visit a parse tree produced by the {@code throw} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitThrow(PainlessParser.ThrowContext ctx); + + /** + * Visit a parse tree produced by the {@code expr} + * labeled alternative in {@link PainlessParser#dstatement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExpr(PainlessParser.ExprContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#trailer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrailer(PainlessParser.TrailerContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#block}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBlock(PainlessParser.BlockContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#empty}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmpty(PainlessParser.EmptyContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#initializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitInitializer(PainlessParser.InitializerContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#afterthought}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAfterthought(PainlessParser.AfterthoughtContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#declaration}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclaration(PainlessParser.DeclarationContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#decltype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecltype(PainlessParser.DecltypeContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#type}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitType(PainlessParser.TypeContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#declvar}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclvar(PainlessParser.DeclvarContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#trap}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrap(PainlessParser.TrapContext ctx); + + /** + * Visit a parse tree produced by the {@code single} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSingle(PainlessParser.SingleContext ctx); + + /** + * Visit a parse tree produced by the {@code comp} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitComp(PainlessParser.CompContext ctx); + + /** + * Visit a parse tree produced by the {@code bool} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBool(PainlessParser.BoolContext ctx); + + /** + * Visit a parse tree produced by the {@code binary} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBinary(PainlessParser.BinaryContext ctx); + + /** + * Visit a parse tree produced by the {@code elvis} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitElvis(PainlessParser.ElvisContext ctx); + + /** + * Visit a parse tree produced by the {@code instanceof} + * labeled alternative in {@link PainlessParser#noncondexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitInstanceof(PainlessParser.InstanceofContext ctx); + + /** + * Visit a parse tree produced by the {@code nonconditional} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNonconditional(PainlessParser.NonconditionalContext ctx); + + /** + * Visit a parse tree produced by the {@code conditional} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitConditional(PainlessParser.ConditionalContext ctx); + + /** + * Visit a parse tree produced by the {@code assignment} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAssignment(PainlessParser.AssignmentContext ctx); + + /** + * Visit a parse tree produced by the {@code pre} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPre(PainlessParser.PreContext ctx); + + /** + * Visit a parse tree produced by the {@code addsub} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAddsub(PainlessParser.AddsubContext ctx); + + /** + * Visit a parse tree produced by the {@code notaddsub} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNotaddsub(PainlessParser.NotaddsubContext ctx); + + /** + * Visit a parse tree produced by the {@code read} + * labeled alternative in {@link PainlessParser#unarynotaddsub}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitRead(PainlessParser.ReadContext ctx); + + /** + * Visit a parse tree produced by the {@code post} + * labeled alternative in {@link PainlessParser#unarynotaddsub}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPost(PainlessParser.PostContext ctx); + + /** + * Visit a parse tree produced by the {@code not} + * labeled alternative in {@link PainlessParser#unarynotaddsub}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNot(PainlessParser.NotContext ctx); + + /** + * Visit a parse tree produced by the {@code cast} + * labeled alternative in {@link PainlessParser#unarynotaddsub}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCast(PainlessParser.CastContext ctx); + + /** + * Visit a parse tree produced by the {@code primordefcast} + * labeled alternative in {@link PainlessParser#castexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPrimordefcast(PainlessParser.PrimordefcastContext ctx); + + /** + * Visit a parse tree produced by the {@code refcast} + * labeled alternative in {@link PainlessParser#castexpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitRefcast(PainlessParser.RefcastContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#primordefcasttype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPrimordefcasttype(PainlessParser.PrimordefcasttypeContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#refcasttype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitRefcasttype(PainlessParser.RefcasttypeContext ctx); + + /** + * Visit a parse tree produced by the {@code dynamic} + * labeled alternative in {@link PainlessParser#chain}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDynamic(PainlessParser.DynamicContext ctx); + + /** + * Visit a parse tree produced by the {@code newarray} + * labeled alternative in {@link PainlessParser#chain}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNewarray(PainlessParser.NewarrayContext ctx); + + /** + * Visit a parse tree produced by the {@code precedence} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPrecedence(PainlessParser.PrecedenceContext ctx); + + /** + * Visit a parse tree produced by the {@code numeric} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNumeric(PainlessParser.NumericContext ctx); + + /** + * Visit a parse tree produced by the {@code true} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrue(PainlessParser.TrueContext ctx); + + /** + * Visit a parse tree produced by the {@code false} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFalse(PainlessParser.FalseContext ctx); + + /** + * Visit a parse tree produced by the {@code null} + * labeled alternative in {@link PainlessParser#primary}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitNull(PainlessParser.NullContext ctx); + + /** + * Visit a parse tree produced by the {@code string} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitString(PainlessParser.StringContext ctx); + + /** + * Visit a parse tree produced by the {@code regex} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitRegex(PainlessParser.RegexContext ctx); + + /** + * Visit a parse tree produced by the {@code listinit} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitListinit(PainlessParser.ListinitContext ctx); + + /** + * Visit a parse tree produced by the {@code mapinit} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMapinit(PainlessParser.MapinitContext ctx); + + /** + * Visit a parse tree produced by the {@code variable} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitVariable(PainlessParser.VariableContext ctx); + + /** + * Visit a parse tree produced by the {@code calllocal} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCalllocal(PainlessParser.CalllocalContext ctx); + + /** + * Visit a parse tree produced by the {@code newobject} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNewobject(PainlessParser.NewobjectContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#postfix}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPostfix(PainlessParser.PostfixContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#postdot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPostdot(PainlessParser.PostdotContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#callinvoke}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCallinvoke(PainlessParser.CallinvokeContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#fieldaccess}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFieldaccess(PainlessParser.FieldaccessContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#braceaccess}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBraceaccess(PainlessParser.BraceaccessContext ctx); + + /** + * Visit a parse tree produced by the {@code newstandardarray} + * labeled alternative in {@link PainlessParser#arrayinitializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx); + + /** + * Visit a parse tree produced by the {@code newinitializedarray} + * labeled alternative in {@link PainlessParser#arrayinitializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#listinitializer}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitListinitializer(PainlessParser.ListinitializerContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#mapinitializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMapinitializer(PainlessParser.MapinitializerContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#maptoken}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMaptoken(PainlessParser.MaptokenContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#arguments}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArguments(PainlessParser.ArgumentsContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#argument}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArgument(PainlessParser.ArgumentContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#lambda}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitLambda(PainlessParser.LambdaContext ctx); + + /** + * Visit a parse tree produced by {@link PainlessParser#lamtype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitLamtype(PainlessParser.LamtypeContext ctx); + + /** + * Visit a parse tree produced by the {@code classfuncref} + * labeled alternative in {@link PainlessParser#funcref}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitClassfuncref(PainlessParser.ClassfuncrefContext ctx); + + /** + * Visit a parse tree produced by the {@code constructorfuncref} + * labeled alternative in {@link PainlessParser#funcref}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitConstructorfuncref(PainlessParser.ConstructorfuncrefContext ctx); + + /** + * Visit a parse tree produced by the {@code localfuncref} + * labeled alternative in {@link PainlessParser#funcref}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitLocalfuncref(PainlessParser.LocalfuncrefContext ctx); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/ParserErrorStrategy.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/ParserErrorStrategy.java index 2644c7e3d98..7ddf9cd54fe 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/ParserErrorStrategy.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/ParserErrorStrategy.java @@ -58,8 +58,12 @@ final class ParserErrorStrategy extends DefaultErrorStrategy { if (token == null) { message = "no parse token found."; } else if (re instanceof InputMismatchException) { - message = "unexpected token [" + getTokenErrorDisplay(token) + "]" + - " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; + message = "unexpected token [" + + getTokenErrorDisplay(token) + + "]" + + " was expecting one of [" + + re.getExpectedTokens().toString(recognizer.getVocabulary()) + + "]."; } else if (re instanceof NoViableAltException) { if (token.getType() == PainlessParser.EOF) { message = "unexpected end of script."; @@ -67,7 +71,7 @@ final class ParserErrorStrategy extends DefaultErrorStrategy { message = "invalid sequence of tokens near [" + getTokenErrorDisplay(token) + "]."; } } else { - message = "unexpected token near [" + getTokenErrorDisplay(token) + "]."; + message = "unexpected token near [" + getTokenErrorDisplay(token) + "]."; } Location location = new Location(sourceName, token == null ? -1 : token.getStartIndex()); @@ -77,14 +81,17 @@ final class ParserErrorStrategy extends DefaultErrorStrategy { @Override public Token recoverInline(final Parser recognizer) throws RecognitionException { final Token token = recognizer.getCurrentToken(); - final String message = "unexpected token [" + getTokenErrorDisplay(token) + "]" + - " was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; + final String message = "unexpected token [" + + getTokenErrorDisplay(token) + + "]" + + " was expecting one of [" + + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + + "]."; Location location = new Location(sourceName, token.getStartIndex()); throw location.createError(new IllegalArgumentException(message)); } @Override - public void sync(final Parser recognizer) { - } + public void sync(final Parser recognizer) {} } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java index 55f280f11c0..719a69a9977 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java @@ -196,7 +196,7 @@ public final class Walker extends PainlessParserBaseVisitor { this.identifier = 0; - this.source = (SClass)visit(buildAntlrTree(sourceText)); + this.source = (SClass) visit(buildAntlrTree(sourceText)); } private int nextIdentifier() { @@ -227,10 +227,15 @@ public final class Walker extends PainlessParserBaseVisitor { // a second listener to fail the test when the above happens. 
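The ParserErrorStrategy hunks above are formatting-only, but the behaviour they reformat is worth spelling out: every recognition failure is converted into an IllegalArgumentException that carries a Location into the script, and sync() is a no-op, so the parser never attempts ANTLR's usual token-deletion recovery. A minimal sketch of installing such a strategy; the wrapper class is hypothetical and assumed to live in the same package as the package-private strategy, while setErrorHandler and removeErrorListeners are standard ANTLR 4 Parser methods:

    import org.antlr.v4.runtime.Parser;

    // Illustrative wiring only: fail fast instead of letting ANTLR recover.
    final class FailFastParserSetup {
        static Parser configure(Parser parser, ParserErrorStrategy strategy) {
            parser.removeErrorListeners();    // drop ANTLR's default console listener
            parser.setErrorHandler(strategy); // throw a Location-aware IllegalArgumentException on bad input
            return parser;
        }
    }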
parser.addErrorListener(new BaseErrorListener() { @Override - public void syntaxError(final Recognizer recognizer, final Object offendingSymbol, final int line, - final int charPositionInLine, final String msg, final RecognitionException e) { - throw new AssertionError("line: " + line + ", offset: " + charPositionInLine + - ", symbol:" + offendingSymbol + " " + msg); + public void syntaxError( + final Recognizer recognizer, + final Object offendingSymbol, + final int line, + final int charPositionInLine, + final String msg, + final RecognitionException e + ) { + throw new AssertionError("line: " + line + ", offset: " + charPositionInLine + ", symbol:" + offendingSymbol + " " + msg); } }); @@ -252,7 +257,7 @@ public final class Walker extends PainlessParserBaseVisitor { List functions = new ArrayList<>(); for (FunctionContext function : ctx.function()) { - functions.add((SFunction)visit(function)); + functions.add((SFunction) visit(function)); } // handle the code to generate the execute method here @@ -261,12 +266,23 @@ public final class Walker extends PainlessParserBaseVisitor { List statements = new ArrayList<>(); for (StatementContext statement : ctx.statement()) { - statements.add((AStatement)visit(statement)); + statements.add((AStatement) visit(statement)); } // generate the execute method from the collected statements and parameters - SFunction execute = new SFunction(nextIdentifier(), location(ctx), "", "execute", emptyList(), emptyList(), - new SBlock(nextIdentifier(), location(ctx), statements), false, false, false, false); + SFunction execute = new SFunction( + nextIdentifier(), + location(ctx), + "", + "execute", + emptyList(), + emptyList(), + new SBlock(nextIdentifier(), location(ctx), statements), + false, + false, + false, + false + ); functions.add(execute); return new SClass(nextIdentifier(), location(ctx), functions); @@ -289,15 +305,26 @@ public final class Walker extends PainlessParserBaseVisitor { } for (StatementContext statement : ctx.block().statement()) { - statements.add((AStatement)visit(statement)); + statements.add((AStatement) visit(statement)); } if (ctx.block().dstatement() != null) { - statements.add((AStatement)visit(ctx.block().dstatement())); + statements.add((AStatement) visit(ctx.block().dstatement())); } - return new SFunction(nextIdentifier(), location(ctx), - rtnType, name, paramTypes, paramNames, new SBlock(nextIdentifier(), location(ctx), statements), false, true, false, false); + return new SFunction( + nextIdentifier(), + location(ctx), + rtnType, + name, + paramTypes, + paramNames, + new SBlock(nextIdentifier(), location(ctx), statements), + false, + true, + false, + false + ); } @Override @@ -318,11 +345,11 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitIf(IfContext ctx) { - AExpression expression = (AExpression)visit(ctx.expression()); - SBlock ifblock = (SBlock)visit(ctx.trailer(0)); + AExpression expression = (AExpression) visit(ctx.expression()); + SBlock ifblock = (SBlock) visit(ctx.trailer(0)); if (ctx.trailer().size() > 1) { - SBlock elseblock = (SBlock)visit(ctx.trailer(1)); + SBlock elseblock = (SBlock) visit(ctx.trailer(1)); return new SIfElse(nextIdentifier(), location(ctx), expression, ifblock, elseblock); } else { @@ -332,10 +359,10 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitWhile(WhileContext ctx) { - AExpression expression = (AExpression)visit(ctx.expression()); + AExpression expression = (AExpression) visit(ctx.expression()); if 
(ctx.trailer() != null) { - SBlock block = (SBlock)visit(ctx.trailer()); + SBlock block = (SBlock) visit(ctx.trailer()); return new SWhile(nextIdentifier(), location(ctx), expression, block); } else if (ctx.empty() != null) { @@ -347,8 +374,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitDo(DoContext ctx) { - AExpression expression = (AExpression)visit(ctx.expression()); - SBlock block = (SBlock)visit(ctx.block()); + AExpression expression = (AExpression) visit(ctx.expression()); + SBlock block = (SBlock) visit(ctx.block()); return new SDo(nextIdentifier(), location(ctx), expression, block); } @@ -356,11 +383,11 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitFor(ForContext ctx) { ANode initializer = ctx.initializer() == null ? null : visit(ctx.initializer()); - AExpression expression = ctx.expression() == null ? null : (AExpression)visit(ctx.expression()); - AExpression afterthought = ctx.afterthought() == null ? null : (AExpression)visit(ctx.afterthought()); + AExpression expression = ctx.expression() == null ? null : (AExpression) visit(ctx.expression()); + AExpression afterthought = ctx.afterthought() == null ? null : (AExpression) visit(ctx.afterthought()); if (ctx.trailer() != null) { - SBlock block = (SBlock)visit(ctx.trailer()); + SBlock block = (SBlock) visit(ctx.trailer()); return new SFor(nextIdentifier(), location(ctx), initializer, expression, afterthought, block); } else if (ctx.empty() != null) { @@ -374,8 +401,8 @@ public final class Walker extends PainlessParserBaseVisitor { public ANode visitEach(EachContext ctx) { String type = ctx.decltype().getText(); String name = ctx.ID().getText(); - AExpression expression = (AExpression)visit(ctx.expression()); - SBlock block = (SBlock)visit(ctx.trailer()); + AExpression expression = (AExpression) visit(ctx.expression()); + SBlock block = (SBlock) visit(ctx.trailer()); return new SEach(nextIdentifier(), location(ctx), type, name, expression, block); } @@ -383,8 +410,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitIneach(IneachContext ctx) { String name = ctx.ID().getText(); - AExpression expression = (AExpression)visit(ctx.expression()); - SBlock block = (SBlock)visit(ctx.trailer()); + AExpression expression = (AExpression) visit(ctx.expression()); + SBlock block = (SBlock) visit(ctx.trailer()); return new SEach(nextIdentifier(), location(ctx), "def", name, expression, block); } @@ -417,11 +444,11 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitTry(TryContext ctx) { - SBlock block = (SBlock)visit(ctx.block()); + SBlock block = (SBlock) visit(ctx.block()); List catches = new ArrayList<>(); for (TrapContext trap : ctx.trap()) { - catches.add((SCatch)visit(trap)); + catches.add((SCatch) visit(trap)); } return new STry(nextIdentifier(), location(ctx), block, catches); @@ -429,14 +456,14 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitThrow(ThrowContext ctx) { - AExpression expression = (AExpression)visit(ctx.expression()); + AExpression expression = (AExpression) visit(ctx.expression()); return new SThrow(nextIdentifier(), location(ctx), expression); } @Override public ANode visitExpr(ExprContext ctx) { - AExpression expression = (AExpression)visit(ctx.expression()); + AExpression expression = (AExpression) visit(ctx.expression()); return new SExpression(nextIdentifier(), location(ctx), expression); } @@ -447,7 
+474,7 @@ public final class Walker extends PainlessParserBaseVisitor { return visit(ctx.block()); } else if (ctx.statement() != null) { List statements = new ArrayList<>(); - statements.add((AStatement)visit(ctx.statement())); + statements.add((AStatement) visit(ctx.statement())); return new SBlock(nextIdentifier(), location(ctx), statements); } else { @@ -463,11 +490,11 @@ public final class Walker extends PainlessParserBaseVisitor { List statements = new ArrayList<>(); for (StatementContext statement : ctx.statement()) { - statements.add((AStatement)visit(statement)); + statements.add((AStatement) visit(statement)); } if (ctx.dstatement() != null) { - statements.add((AStatement)visit(ctx.dstatement())); + statements.add((AStatement) visit(ctx.dstatement())); } return new SBlock(nextIdentifier(), location(ctx), statements); @@ -502,7 +529,7 @@ public final class Walker extends PainlessParserBaseVisitor { for (DeclvarContext declvar : ctx.declvar()) { String name = declvar.ID().getText(); - AExpression expression = declvar.expression() == null ? null : (AExpression)visit(declvar.expression()); + AExpression expression = declvar.expression() == null ? null : (AExpression) visit(declvar.expression()); declarations.add(new SDeclaration(nextIdentifier(), location(declvar), type, name, expression)); } @@ -528,7 +555,7 @@ public final class Walker extends PainlessParserBaseVisitor { public ANode visitTrap(TrapContext ctx) { String type = ctx.type().getText(); String name = ctx.ID().getText(); - SBlock block = (SBlock)visit(ctx.block()); + SBlock block = (SBlock) visit(ctx.block()); return new SCatch(nextIdentifier(), location(ctx), Exception.class, type, name, block); } @@ -540,8 +567,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitBinary(BinaryContext ctx) { - AExpression left = (AExpression)visit(ctx.noncondexpression(0)); - AExpression right = (AExpression)visit(ctx.noncondexpression(1)); + AExpression left = (AExpression) visit(ctx.noncondexpression(0)); + AExpression right = (AExpression) visit(ctx.noncondexpression(1)); final Operation operation; if (ctx.MUL() != null) { @@ -579,8 +606,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitComp(CompContext ctx) { - AExpression left = (AExpression)visit(ctx.noncondexpression(0)); - AExpression right = (AExpression)visit(ctx.noncondexpression(1)); + AExpression left = (AExpression) visit(ctx.noncondexpression(0)); + AExpression right = (AExpression) visit(ctx.noncondexpression(1)); final Operation operation; if (ctx.LT() != null) { @@ -608,7 +635,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitInstanceof(InstanceofContext ctx) { - AExpression expr = (AExpression)visit(ctx.noncondexpression()); + AExpression expr = (AExpression) visit(ctx.noncondexpression()); String type = ctx.decltype().getText(); return new EInstanceof(nextIdentifier(), location(ctx), expr, type); @@ -616,8 +643,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitBool(BoolContext ctx) { - AExpression left = (AExpression)visit(ctx.noncondexpression(0)); - AExpression right = (AExpression)visit(ctx.noncondexpression(1)); + AExpression left = (AExpression) visit(ctx.noncondexpression(0)); + AExpression right = (AExpression) visit(ctx.noncondexpression(1)); final Operation operation; if (ctx.BOOLAND() != null) { @@ -633,8 +660,8 @@ public final class Walker extends PainlessParserBaseVisitor { @Override 
public ANode visitElvis(ElvisContext ctx) { - AExpression left = (AExpression)visit(ctx.noncondexpression(0)); - AExpression right = (AExpression)visit(ctx.noncondexpression(1)); + AExpression left = (AExpression) visit(ctx.noncondexpression(0)); + AExpression right = (AExpression) visit(ctx.noncondexpression(1)); return new EElvis(nextIdentifier(), location(ctx), left, right); } @@ -646,17 +673,17 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitConditional(ConditionalContext ctx) { - AExpression condition = (AExpression)visit(ctx.noncondexpression()); - AExpression left = (AExpression)visit(ctx.expression(0)); - AExpression right = (AExpression)visit(ctx.expression(1)); + AExpression condition = (AExpression) visit(ctx.noncondexpression()); + AExpression left = (AExpression) visit(ctx.expression(0)); + AExpression right = (AExpression) visit(ctx.expression(1)); return new EConditional(nextIdentifier(), location(ctx), condition, left, right); } @Override public ANode visitAssignment(AssignmentContext ctx) { - AExpression lhs = (AExpression)visit(ctx.noncondexpression()); - AExpression rhs = (AExpression)visit(ctx.expression()); + AExpression lhs = (AExpression) visit(ctx.noncondexpression()); + AExpression rhs = (AExpression) visit(ctx.expression()); final Operation operation; @@ -693,7 +720,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitPre(PreContext ctx) { - AExpression expression = (AExpression)visit(ctx.chain()); + AExpression expression = (AExpression) visit(ctx.chain()); final Operation operation; @@ -705,13 +732,19 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("illegal tree structure")); } - return new EAssignment(nextIdentifier(), location(ctx), expression, - new ENumeric(nextIdentifier(), location(ctx), "1", 10), false, operation); + return new EAssignment( + nextIdentifier(), + location(ctx), + expression, + new ENumeric(nextIdentifier(), location(ctx), "1", 10), + false, + operation + ); } @Override public ANode visitAddsub(AddsubContext ctx) { - AExpression expression = (AExpression)visit(ctx.unary()); + AExpression expression = (AExpression) visit(ctx.unary()); final Operation operation; @@ -738,7 +771,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitPost(PostContext ctx) { - AExpression expression = (AExpression)visit(ctx.chain()); + AExpression expression = (AExpression) visit(ctx.chain()); final Operation operation; @@ -750,13 +783,19 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("illegal tree structure")); } - return new EAssignment(nextIdentifier(), location(ctx), expression, - new ENumeric(nextIdentifier(), location(ctx), "1", 10), true, operation); + return new EAssignment( + nextIdentifier(), + location(ctx), + expression, + new ENumeric(nextIdentifier(), location(ctx), "1", 10), + true, + operation + ); } @Override public ANode visitNot(NotContext ctx) { - AExpression expression = (AExpression)visit(ctx.unary()); + AExpression expression = (AExpression) visit(ctx.unary()); final Operation operation; @@ -779,7 +818,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitPrimordefcast(PainlessParser.PrimordefcastContext ctx) { String type = ctx.primordefcasttype().getText(); - AExpression child = (AExpression)visit(ctx.unary()); + AExpression 
child = (AExpression) visit(ctx.unary()); return new EExplicit(nextIdentifier(), location(ctx), type, child); } @@ -787,7 +826,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitRefcast(PainlessParser.RefcastContext ctx) { String type = ctx.refcasttype().getText(); - AExpression child = (AExpression)visit(ctx.unarynotaddsub()); + AExpression child = (AExpression) visit(ctx.unarynotaddsub()); return new EExplicit(nextIdentifier(), location(ctx), type, child); } @@ -804,7 +843,7 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitDynamic(DynamicContext ctx) { - AExpression primary = (AExpression)visit(ctx.primary()); + AExpression primary = (AExpression) visit(ctx.primary()); return buildPostfixChain(primary, null, ctx.postfix()); } @@ -1000,7 +1039,7 @@ public final class Walker extends PainlessParserBaseVisitor { } public AExpression visitBraceaccess(BraceaccessContext ctx, AExpression prefix) { - AExpression expression = (AExpression)visit(ctx.expression()); + AExpression expression = (AExpression) visit(ctx.expression()); return new EBrace(nextIdentifier(), location(ctx), prefix, expression); } @@ -1012,11 +1051,14 @@ public final class Walker extends PainlessParserBaseVisitor { for (ExpressionContext expression : ctx.expression()) { type.append("[]"); - expressions.add((AExpression)visit(expression)); + expressions.add((AExpression) visit(expression)); } return buildPostfixChain( - new ENewArray(nextIdentifier(), location(ctx), type.toString(), expressions, false), ctx.postdot(), ctx.postfix()); + new ENewArray(nextIdentifier(), location(ctx), type.toString(), expressions, false), + ctx.postdot(), + ctx.postfix() + ); } @Override @@ -1025,7 +1067,7 @@ public final class Walker extends PainlessParserBaseVisitor { List expressions = new ArrayList<>(); for (ExpressionContext expression : ctx.expression()) { - expressions.add((AExpression)visit(expression)); + expressions.add((AExpression) visit(expression)); } return buildPostfixChain(new ENewArray(nextIdentifier(), location(ctx), type, expressions, true), null, ctx.postfix()); @@ -1036,7 +1078,7 @@ public final class Walker extends PainlessParserBaseVisitor { List values = new ArrayList<>(); for (ExpressionContext expression : ctx.expression()) { - values.add((AExpression)visit(expression)); + values.add((AExpression) visit(expression)); } return new EListInit(nextIdentifier(), location(ctx), values); @@ -1048,8 +1090,8 @@ public final class Walker extends PainlessParserBaseVisitor { List values = new ArrayList<>(); for (MaptokenContext maptoken : ctx.maptoken()) { - keys.add((AExpression)visit(maptoken.expression(0))); - values.add((AExpression)visit(maptoken.expression(1))); + keys.add((AExpression) visit(maptoken.expression(0))); + values.add((AExpression) visit(maptoken.expression(1))); } return new EMapInit(nextIdentifier(), location(ctx), keys, values); @@ -1069,7 +1111,7 @@ public final class Walker extends PainlessParserBaseVisitor { List arguments = new ArrayList<>(); for (ArgumentContext argument : ctx.argument()) { - arguments.add((AExpression)visit(argument)); + arguments.add((AExpression) visit(argument)); } return arguments; @@ -1106,11 +1148,14 @@ public final class Walker extends PainlessParserBaseVisitor { if (ctx.expression() != null) { // single expression - AExpression expression = (AExpression)visit(ctx.expression()); - block = new SBlock(nextIdentifier(), location(ctx), - Collections.singletonList(new SReturn(nextIdentifier(), 
location(ctx), expression))); + AExpression expression = (AExpression) visit(ctx.expression()); + block = new SBlock( + nextIdentifier(), + location(ctx), + Collections.singletonList(new SReturn(nextIdentifier(), location(ctx), expression)) + ); } else { - block = (SBlock)visit(ctx.block()); + block = (SBlock) visit(ctx.block()); } return new ELambda(nextIdentifier(), location(ctx), paramTypes, paramNames, block); @@ -1128,9 +1173,9 @@ public final class Walker extends PainlessParserBaseVisitor { @Override public ANode visitConstructorfuncref(ConstructorfuncrefContext ctx) { - return ctx.decltype().LBRACE().isEmpty() ? - new EFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText(), ctx.NEW().getText()) : - new ENewArrayFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText()); + return ctx.decltype().LBRACE().isEmpty() + ? new EFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText(), ctx.NEW().getText()) + : new ENewArrayFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText()); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/api/Augmentation.java b/modules/lang-painless/src/main/java/org/opensearch/painless/api/Augmentation.java index 3278ef9f486..821fbc45c42 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/api/Augmentation.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/api/Augmentation.java @@ -88,7 +88,7 @@ public class Augmentation { /** Converts this Iterable to a Collection. Returns the original Iterable if it is already a Collection. */ public static Collection asCollection(Iterable receiver) { if (receiver instanceof Collection) { - return (Collection)receiver; + return (Collection) receiver; } List list = new ArrayList<>(); for (T t : receiver) { @@ -100,7 +100,7 @@ public class Augmentation { /** Converts this Iterable to a List. Returns the original Iterable if it is already a List. */ public static List asList(Iterable receiver) { if (receiver instanceof List) { - return (List)receiver; + return (List) receiver; } List list = new ArrayList<>(); for (T t : receiver) { @@ -157,13 +157,13 @@ public class Augmentation { * Iterates through the Iterable transforming items using the supplied function and * collecting any non-null results. */ - public static List findResults(Iterable receiver, Function filter) { + public static List findResults(Iterable receiver, Function filter) { List list = new ArrayList<>(); - for (T t: receiver) { - U result = filter.apply(t); - if (result != null) { - list.add(result); - } + for (T t : receiver) { + U result = filter.apply(t); + if (result != null) { + list.add(result); + } } return list; } @@ -171,8 +171,8 @@ public class Augmentation { /** * Sorts all Iterable members into groups determined by the supplied mapping function. */ - public static Map> groupBy(Iterable receiver, Function mapper) { - Map> map = new LinkedHashMap<>(); + public static Map> groupBy(Iterable receiver, Function mapper) { + Map> map = new LinkedHashMap<>(); for (T t : receiver) { U mapped = mapper.apply(t); List results = map.get(mapped); @@ -229,7 +229,7 @@ public class Augmentation { * Iterates through this collection transforming each entry into a new value using * the function, returning a list of transformed values. 
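Editor's note: the Augmentation hunks above only re-wrap the Iterable helpers (findResults, groupBy, collect); the generic type parameters do not survive in the diff text, so the intended signatures are easy to misread. The standalone sketch below shows the presumed shape of two of these helpers under assumed <T, U> parameters; the class name and bodies are illustrative, not copied from Augmentation.java.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Hypothetical stand-ins for the Iterable helpers touched above; the <T, U>
// parameters are assumptions reconstructed from the Javadoc.
final class IterableHelpersSketch {

    /** Collects the non-null results of applying {@code filter} to each element. */
    static <T, U> List<U> findResults(Iterable<T> receiver, Function<T, U> filter) {
        List<U> list = new ArrayList<>();
        for (T t : receiver) {
            U result = filter.apply(t);
            if (result != null) {
                list.add(result);
            }
        }
        return list;
    }

    /** Groups elements by the key produced by {@code mapper}, preserving encounter order. */
    static <T, U> Map<U, List<T>> groupBy(Iterable<T> receiver, Function<T, U> mapper) {
        Map<U, List<T>> map = new LinkedHashMap<>();
        for (T t : receiver) {
            map.computeIfAbsent(mapper.apply(t), k -> new ArrayList<>()).add(t);
        }
        return map;
    }

    public static void main(String[] args) {
        List<String> words = List.of("fox", "dog", "quick", "brown");
        // findResults drops nulls: only words longer than three characters survive.
        System.out.println(findResults(words, w -> w.length() > 3 ? w.toUpperCase() : null));
        // groupBy keys by length: {3=[fox, dog], 5=[quick, brown]}
        System.out.println(groupBy(words, String::length));
    }
}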
*/ - public static List collect(Collection receiver, Function function) { + public static List collect(Collection receiver, Function function) { List list = new ArrayList<>(); for (T t : receiver) { list.add(function.apply(t)); @@ -241,7 +241,7 @@ public class Augmentation { * Iterates through this collection transforming each entry into a new value using * the function, adding the values to the specified collection. */ - public static Object collect(Collection receiver, Collection collection, Function function) { + public static Object collect(Collection receiver, Collection collection, Function function) { for (T t : receiver) { collection.add(function.apply(t)); } @@ -278,7 +278,7 @@ public class Augmentation { * but stopping once the first non-null result is found and returning that result. * If all results are null, null is returned. */ - public static Object findResult(Collection receiver, Function function) { + public static Object findResult(Collection receiver, Function function) { return findResult(receiver, null, function); } @@ -287,7 +287,7 @@ public class Augmentation { * but stopping once the first non-null result is found and returning that result. * If all results are null, defaultResult is returned. */ - public static Object findResult(Collection receiver, Object defaultResult, Function function) { + public static Object findResult(Collection receiver, Object defaultResult, Function function) { for (T t : receiver) { U value = function.apply(t); if (value != null) { @@ -324,9 +324,9 @@ public class Augmentation { * Iterates through this map transforming each entry into a new value using * the function, returning a list of transformed values. */ - public static List collect(Map receiver, BiFunction function) { + public static List collect(Map receiver, BiFunction function) { List list = new ArrayList<>(); - for (Map.Entry kvPair : receiver.entrySet()) { + for (Map.Entry kvPair : receiver.entrySet()) { list.add(function.apply(kvPair.getKey(), kvPair.getValue())); } return list; @@ -336,17 +336,17 @@ public class Augmentation { * Iterates through this map transforming each entry into a new value using * the function, adding the values to the specified collection. */ - public static Object collect(Map receiver, Collection collection, BiFunction function) { - for (Map.Entry kvPair : receiver.entrySet()) { + public static Object collect(Map receiver, Collection collection, BiFunction function) { + for (Map.Entry kvPair : receiver.entrySet()) { collection.add(function.apply(kvPair.getKey(), kvPair.getValue())); } return collection; } /** Counts the number of occurrences which satisfy the given predicate from inside this Map */ - public static int count(Map receiver, BiPredicate predicate) { + public static int count(Map receiver, BiPredicate predicate) { int count = 0; - for (Map.Entry kvPair : receiver.entrySet()) { + for (Map.Entry kvPair : receiver.entrySet()) { if (predicate.test(kvPair.getKey(), kvPair.getValue())) { count++; } @@ -355,7 +355,7 @@ public class Augmentation { } /** Iterates through a Map, passing each item to the given consumer. */ - public static Object each(Map receiver, BiConsumer consumer) { + public static Object each(Map receiver, BiConsumer consumer) { receiver.forEach(consumer); return receiver; } @@ -363,8 +363,8 @@ public class Augmentation { /** * Used to determine if the given predicate is valid (i.e. returns true for all items in this map). 
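Editor's note: for the Map-oriented helpers whose Javadoc appears in this hunk (collect over the entry set, count, every), here is a hedged reconstruction with explicit key/value type parameters; the generic signatures and the demo class name are assumptions, not the patched code.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;

// Illustrative versions of the Map helpers described above.
final class MapHelpersSketch {

    /** Applies {@code function} to every key/value pair and returns the results. */
    static <K, V, R> List<R> collect(Map<K, V> receiver, BiFunction<K, V, R> function) {
        List<R> list = new ArrayList<>();
        for (Map.Entry<K, V> kvPair : receiver.entrySet()) {
            list.add(function.apply(kvPair.getKey(), kvPair.getValue()));
        }
        return list;
    }

    /** Counts the entries that satisfy {@code predicate}. */
    static <K, V> int count(Map<K, V> receiver, BiPredicate<K, V> predicate) {
        int count = 0;
        for (Map.Entry<K, V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
                count++;
            }
        }
        return count;
    }

    /** True only if every entry satisfies {@code predicate}. */
    static <K, V> boolean every(Map<K, V> receiver, BiPredicate<K, V> predicate) {
        for (Map.Entry<K, V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue()) == false) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Map<String, Integer> scores = Map.of("fox", 3, "quick", 5);
        System.out.println(collect(scores, (k, v) -> k + "=" + v)); // e.g. [fox=3, quick=5]
        System.out.println(count(scores, (k, v) -> v > 3));          // 1
        System.out.println(every(scores, (k, v) -> v > 0));          // true
    }
}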
*/ - public static boolean every(Map receiver, BiPredicate predicate) { - for (Map.Entry kvPair : receiver.entrySet()) { + public static boolean every(Map receiver, BiPredicate predicate) { + for (Map.Entry kvPair : receiver.entrySet()) { if (predicate.test(kvPair.getKey(), kvPair.getValue()) == false) { return false; } @@ -375,8 +375,8 @@ public class Augmentation { /** * Finds the first entry matching the predicate, or returns null. */ - public static Map.Entry find(Map receiver, BiPredicate predicate) { - for (Map.Entry kvPair : receiver.entrySet()) { + public static Map.Entry find(Map receiver, BiPredicate predicate) { + for (Map.Entry kvPair : receiver.entrySet()) { if (predicate.test(kvPair.getKey(), kvPair.getValue())) { return kvPair; } @@ -387,15 +387,15 @@ public class Augmentation { /** * Finds all values matching the predicate, returns as a map. */ - public static Map findAll(Map receiver, BiPredicate predicate) { + public static Map findAll(Map receiver, BiPredicate predicate) { // try to preserve some properties of the receiver (see the groovy javadocs) - final Map map; + final Map map; if (receiver instanceof TreeMap) { map = new TreeMap<>(); } else { map = new LinkedHashMap<>(); } - for (Map.Entry kvPair : receiver.entrySet()) { + for (Map.Entry kvPair : receiver.entrySet()) { if (predicate.test(kvPair.getKey(), kvPair.getValue())) { map.put(kvPair.getKey(), kvPair.getValue()); } @@ -408,7 +408,7 @@ public class Augmentation { * but stopping once the first non-null result is found and returning that result. * If all results are null, null is returned. */ - public static Object findResult(Map receiver, BiFunction function) { + public static Object findResult(Map receiver, BiFunction function) { return findResult(receiver, null, function); } @@ -417,8 +417,8 @@ public class Augmentation { * but stopping once the first non-null result is found and returning that result. * If all results are null, defaultResult is returned. */ - public static Object findResult(Map receiver, Object defaultResult, BiFunction function) { - for (Map.Entry kvPair : receiver.entrySet()) { + public static Object findResult(Map receiver, Object defaultResult, BiFunction function) { + for (Map.Entry kvPair : receiver.entrySet()) { T value = function.apply(kvPair.getKey(), kvPair.getValue()); if (value != null) { return value; @@ -431,13 +431,13 @@ public class Augmentation { * Iterates through the map transforming items using the supplied function and * collecting any non-null results. */ - public static List findResults(Map receiver, BiFunction filter) { + public static List findResults(Map receiver, BiFunction filter) { List list = new ArrayList<>(); - for (Map.Entry kvPair : receiver.entrySet()) { - T result = filter.apply(kvPair.getKey(), kvPair.getValue()); - if (result != null) { - list.add(result); - } + for (Map.Entry kvPair : receiver.entrySet()) { + T result = filter.apply(kvPair.getKey(), kvPair.getValue()); + if (result != null) { + list.add(result); + } } return list; } @@ -445,11 +445,11 @@ public class Augmentation { /** * Sorts all Map members into groups determined by the supplied mapping function. 
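Editor's note: the findAll/groupBy reformatting above keeps the "try to preserve some properties of the receiver" branch (TreeMap vs LinkedHashMap). A minimal sketch of that idea, under assumed generic signatures and an illustrative class name:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.BiPredicate;

// A TreeMap receiver yields a sorted result; anything else keeps encounter order.
final class FindAllSketch {

    static <K, V> Map<K, V> findAll(Map<K, V> receiver, BiPredicate<K, V> predicate) {
        // Match the receiver's flavor so callers keep sorted-vs-insertion ordering.
        final Map<K, V> map = (receiver instanceof TreeMap) ? new TreeMap<>() : new LinkedHashMap<>();
        for (Map.Entry<K, V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
                map.put(kvPair.getKey(), kvPair.getValue());
            }
        }
        return map;
    }

    public static void main(String[] args) {
        Map<String, Integer> sorted = new TreeMap<>(Map.of("b", 2, "a", 1, "c", 3));
        // Result is itself a TreeMap, so keys stay sorted: {a=1, c=3}
        System.out.println(findAll(sorted, (k, v) -> v % 2 == 1));
    }
}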
*/ - public static Map> groupBy(Map receiver, BiFunction mapper) { - Map> map = new LinkedHashMap<>(); - for (Map.Entry kvPair : receiver.entrySet()) { + public static Map> groupBy(Map receiver, BiFunction mapper) { + Map> map = new LinkedHashMap<>(); + for (Map.Entry kvPair : receiver.entrySet()) { T mapped = mapper.apply(kvPair.getKey(), kvPair.getValue()); - Map results = map.get(mapped); + Map results = map.get(mapped); if (results == null) { // try to preserve some properties of the receiver (see the groovy javadocs) if (receiver instanceof TreeMap) { @@ -549,7 +549,7 @@ public class Augmentation { // Loop until we hit the limit or forever if we are passed in less than one (signifying no limit) // If Integer.MIN_VALUE is passed in, it will still continue to loop down to 1 from MAX_VALUE // This edge case should be fine as we are limited by receiver length (Integer.MAX_VALUE) even if we split at every char - for(;limit != 1; limit--) { + for (; limit != 1; limit--) { // Find the next occurrence of token after current pos int idx = receiver.indexOf(token, pos); @@ -590,7 +590,7 @@ public class Augmentation { /** * Same as {@link #getByPath(List, String)}, but for Map. */ - public static Object getByPath(Map receiver, String path) { + public static Object getByPath(Map receiver, String path) { return getByPathDispatch(receiver, splitPath(path), 0, throwCantFindValue(path)); } @@ -605,7 +605,7 @@ public class Augmentation { /** * Same as {@link #getByPath(List, String, Object)}, but for Map. */ - public static Object getByPath(Map receiver, String path, Object defaultValue) { + public static Object getByPath(Map receiver, String path, Object defaultValue) { return getByPathDispatch(receiver, splitPath(path), 0, () -> defaultValue); } @@ -614,11 +614,11 @@ public class Augmentation { private static Object getByPathDispatch(Object obj, String[] elements, int i, Supplier defaultSupplier) { if (i > elements.length - 1) { return obj; - } else if (elements[i].length() == 0 ) { + } else if (elements[i].length() == 0) { String format = "Extra '.' in path [%s] at index [%d]"; throw new IllegalArgumentException(String.format(Locale.ROOT, format, String.join(".", elements), i)); - } else if (obj instanceof Map) { - return getByPathMap((Map) obj, elements, i, defaultSupplier); + } else if (obj instanceof Map) { + return getByPathMap((Map) obj, elements, i, defaultSupplier); } else if (obj instanceof List) { return getByPathList((List) obj, elements, i, defaultSupplier); } @@ -626,7 +626,7 @@ public class Augmentation { } // lookup existing key in map, call back to dispatch. - private static Object getByPathMap(Map map, String[] elements, int i, Supplier defaultSupplier) { + private static Object getByPathMap(Map map, String[] elements, int i, Supplier defaultSupplier) { String element = elements[i]; if (map.containsKey(element)) { return getByPathDispatch(map.get(element), elements, i + 1, defaultSupplier); @@ -634,7 +634,7 @@ public class Augmentation { return handleMissing(map, elements, i, defaultSupplier); } - // lookup existing index in list, call back to dispatch. Throws IllegalArgumentException with NumberFormatException + // lookup existing index in list, call back to dispatch. Throws IllegalArgumentException with NumberFormatException // if index can't be parsed as an int. 
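Editor's note: the getByPath changes above are whitespace-only, but the dispatch logic (map key vs list index vs fallback supplier) is easy to lose in the re-wrapped lines. Below is a simplified, self-contained model of that dispatch; names and the flattened loop are hypothetical, and the real implementation recurses through getByPathDispatch/getByPathMap/getByPathList.

import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;

// Walk a dot-separated path through nested Maps and Lists, falling back to a
// supplier when the path dead-ends.
final class PathLookupSketch {

    static Object getByPath(Object root, String path, Supplier<Object> fallback) {
        Object current = root;
        for (String element : path.split("\\.")) {
            if (current instanceof Map) {
                Map<?, ?> map = (Map<?, ?>) current;
                if (map.containsKey(element) == false) {
                    return fallback.get();
                }
                current = map.get(element);
            } else if (current instanceof List) {
                try {
                    // List steps must parse as an int index.
                    current = ((List<?>) current).get(Integer.parseInt(element));
                } catch (NumberFormatException | IndexOutOfBoundsException e) {
                    return fallback.get();
                }
            } else {
                throw new IllegalArgumentException(
                    String.format(Locale.ROOT, "Non-container [%s] at path element [%s]", current, element)
                );
            }
        }
        return current;
    }

    public static void main(String[] args) {
        Map<String, Object> doc = Map.of("hits", List.of(Map.of("field1", "brown fox")));
        System.out.println(getByPath(doc, "hits.0.field1", () -> "missing")); // brown fox
        System.out.println(getByPath(doc, "hits.5.field1", () -> "missing")); // missing
    }
}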
private static Object getByPathList(List list, String[] elements, int i, Supplier defaultSupplier) { String element = elements[i]; @@ -664,9 +664,7 @@ public class Augmentation { // A supplier that throws IllegalArgumentException private static Supplier throwCantFindValue(String path) { - return () -> { - throw new IllegalArgumentException(String.format(Locale.ROOT, "Could not find value at path [%s]", path)); - }; + return () -> { throw new IllegalArgumentException(String.format(Locale.ROOT, "Could not find value at path [%s]", path)); }; } // Use defaultSupplier if at last path element, otherwise throw IllegalArgumentException @@ -680,19 +678,16 @@ public class Augmentation { } String format = "Non-container [%s] at [%s], index [%d] in path [%s]"; throw new IllegalArgumentException( - String.format(Locale.ROOT, format, obj.getClass().getName(), elements[i], i, String.join(".", elements))); + String.format(Locale.ROOT, format, obj.getClass().getName(), elements[i], i, String.join(".", elements)) + ); } public static String sha1(String source) { - return MessageDigests.toHexString( - MessageDigests.sha1().digest(source.getBytes(StandardCharsets.UTF_8)) - ); + return MessageDigests.toHexString(MessageDigests.sha1().digest(source.getBytes(StandardCharsets.UTF_8))); } public static String sha256(String source) { - return MessageDigests.toHexString( - MessageDigests.sha256().digest(source.getBytes(StandardCharsets.UTF_8)) - ); + return MessageDigests.toHexString(MessageDigests.sha256().digest(source.getBytes(StandardCharsets.UTF_8))); } public static final int UNLIMITED_PATTERN_FACTOR = 0; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/api/Json.java b/modules/lang-painless/src/main/java/org/opensearch/painless/api/Json.java index 57283f3d761..89f756ef8b7 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/api/Json.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/api/Json.java @@ -45,11 +45,12 @@ public class Json { /** * Load a string as the Java version of a JSON type, either List (JSON array), Map (JSON object), Number, Boolean or String */ - public static Object load(String json) throws IOException{ + public static Object load(String json) throws IOException { XContentParser parser = JsonXContent.jsonXContent.createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - json); + json + ); switch (parser.nextToken()) { case START_ARRAY: @@ -71,7 +72,7 @@ public class Json { * Write a JSON representable type as a string */ public static String dump(Object data) throws IOException { - return dump(data, false); + return dump(data, false); } /** diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java index bfe1183c50f..80844202952 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java @@ -64,11 +64,19 @@ public class LimitedCharSequence implements CharSequence { } public String details() { - return (pattern != null ? "pattern: [" + pattern.pattern() + "], " : "") + - "limit factor: [" + limitFactor + "], " + - "char limit: [" + counter.charAtLimit + "], " + - "count: [" + counter.count + "], " + - "wrapped: [" + snippet(MAX_STR_LENGTH) + "]"; + return (pattern != null ? 
"pattern: [" + pattern.pattern() + "], " : "") + + "limit factor: [" + + limitFactor + + "], " + + "char limit: [" + + counter.charAtLimit + + "], " + + "count: [" + + counter.count + + "], " + + "wrapped: [" + + snippet(MAX_STR_LENGTH) + + "]"; } /** @@ -76,15 +84,21 @@ public class LimitedCharSequence implements CharSequence { */ String snippet(int maxStrLength) { if (maxStrLength < SNIPPET.length() * 6) { - throw new IllegalArgumentException("max str length must be large enough to include three snippets and three context chars, " + - "at least [" + SNIPPET.length() * 6 +"], not [" + maxStrLength + "]"); + throw new IllegalArgumentException( + "max str length must be large enough to include three snippets and three context chars, " + + "at least [" + + SNIPPET.length() * 6 + + "], not [" + + maxStrLength + + "]" + ); } if (wrapped.length() <= maxStrLength) { return wrapped.toString(); } - return wrapped.subSequence(0, maxStrLength - SNIPPET.length()) + "..." ; + return wrapped.subSequence(0, maxStrLength - SNIPPET.length()) + "..."; } @Override @@ -96,9 +110,14 @@ public class LimitedCharSequence implements CharSequence { public char charAt(int index) { counter.count++; if (counter.hitLimit()) { - throw new CircuitBreakingException("[scripting] Regular expression considered too many characters, " + details() + - ", this limit can be changed by changed by the [" + CompilerSettings.REGEX_LIMIT_FACTOR.getKey() + "] setting", - CircuitBreaker.Durability.TRANSIENT); + throw new CircuitBreakingException( + "[scripting] Regular expression considered too many characters, " + + details() + + ", this limit can be changed by changed by the [" + + CompilerSettings.REGEX_LIMIT_FACTOR.getKey() + + "] setting", + CircuitBreaker.Durability.TRANSIENT + ); } return wrapped.charAt(index); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BinaryMathNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BinaryMathNode.java index 68fa424cdb3..3a4d848a8a3 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BinaryMathNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BinaryMathNode.java @@ -53,7 +53,7 @@ public class BinaryMathNode extends BinaryNode { private Class binaryType; private Class shiftType; private int flags; - // TODO(stu): DefaultUserTreeToIRTree -> visitRegex should have compiler settings in script set. set it + // TODO(stu): DefaultUserTreeToIRTree -> visitRegex should have compiler settings in script set. 
set it private int regexLimit; public void setOperation(Operation operation) { @@ -138,16 +138,23 @@ public class BinaryMathNode extends BinaryNode { } else if (operation == Operation.MATCH) { methodWriter.invokeVirtual(org.objectweb.asm.Type.getType(Matcher.class), WriterConstants.MATCHER_MATCHES); } else { - throw new IllegalStateException("unexpected binary math operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + throw new IllegalStateException( + "unexpected binary math operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } } else { getLeftNode().write(classWriter, methodWriter, writeScope); getRightNode().write(classWriter, methodWriter, writeScope); if (binaryType == def.class || (shiftType != null && shiftType == def.class)) { - methodWriter.writeDynamicBinaryInstruction(getLocation(), - getExpressionType(), getLeftNode().getExpressionType(), getRightNode().getExpressionType(), operation, flags); + methodWriter.writeDynamicBinaryInstruction( + getLocation(), + getExpressionType(), + getLeftNode().getExpressionType(), + getRightNode().getExpressionType(), + operation, + flags + ); } else { methodWriter.writeBinaryInstruction(getLocation(), getExpressionType(), operation); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BooleanNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BooleanNode.java index 2d039b078a8..7952305d614 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BooleanNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/BooleanNode.java @@ -109,8 +109,9 @@ public class BooleanNode extends BinaryNode { methodWriter.push(false); methodWriter.mark(end); } else { - throw new IllegalStateException("unexpected boolean operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + throw new IllegalStateException( + "unexpected boolean operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ClassNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ClassNode.java index 6224b2b2d51..c60388602ee 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ClassNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ClassNode.java @@ -140,8 +140,16 @@ public class ClassNode extends IRNode { String className = CLASS_TYPE.getInternalName(); String[] classInterfaces = new String[] { interfaceBase }; - ClassWriter classWriter = new ClassWriter(scriptScope.getCompilerSettings(), statements, debugStream, - scriptClassInfo.getBaseClass(), classFrames, classAccess, className, classInterfaces); + ClassWriter classWriter = new ClassWriter( + scriptScope.getCompilerSettings(), + statements, + debugStream, + scriptClassInfo.getBaseClass(), + classFrames, + classAccess, + className, + classInterfaces + ); ClassVisitor classVisitor = classWriter.getClassVisitor(); classVisitor.visitSource(Location.computeSourceName(scriptScope.getScriptName()), null); @@ -150,8 +158,11 @@ public class ClassNode extends IRNode { if (scriptClassInfo.getBaseClass().getConstructors().length == 0) { init = new org.objectweb.asm.commons.Method("", MethodType.methodType(void.class).toMethodDescriptorString()); } else { - init = new org.objectweb.asm.commons.Method("", MethodType.methodType(void.class, - 
scriptClassInfo.getBaseClass().getConstructors()[0].getParameterTypes()).toMethodDescriptorString()); + init = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class, scriptClassInfo.getBaseClass().getConstructors()[0].getParameterTypes()) + .toMethodDescriptorString() + ); } // Write the constructor: @@ -165,8 +176,9 @@ public class ClassNode extends IRNode { if (clinitBlockNode.getStatementsNodes().isEmpty() == false) { MethodWriter methodWriter = classWriter.newMethodWriter( - Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, - new Method("", Type.getType(void.class), new Type[0])); + Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, + new Method("", Type.getType(void.class), new Type[0]) + ); clinitBlockNode.write(classWriter, methodWriter, new WriteScope()); methodWriter.returnValue(); methodWriter.endMethod(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ComparisonNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ComparisonNode.java index ab647b9214c..6e48ae1b520 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ComparisonNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ComparisonNode.java @@ -108,102 +108,111 @@ public class ComparisonNode extends BinaryNode { boolean eq = (operation == Operation.EQ || operation == Operation.EQR); boolean ne = (operation == Operation.NE || operation == Operation.NER); - boolean lt = operation == Operation.LT; + boolean lt = operation == Operation.LT; boolean lte = operation == Operation.LTE; - boolean gt = operation == Operation.GT; + boolean gt = operation == Operation.GT; boolean gte = operation == Operation.GTE; boolean writejump = true; Type type = MethodWriter.getType(comparisonType); - if (comparisonType == void.class || comparisonType == byte.class - || comparisonType == short.class || comparisonType == char.class) { - throw new IllegalStateException("unexpected comparison operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + if (comparisonType == void.class || comparisonType == byte.class || comparisonType == short.class || comparisonType == char.class) { + throw new IllegalStateException( + "unexpected comparison operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } else if (comparisonType == boolean.class) { if (eq) methodWriter.ifCmp(type, MethodWriter.EQ, jump); else if (ne) methodWriter.ifCmp(type, MethodWriter.NE, jump); else { - throw new IllegalStateException("unexpected comparison operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); - } - } else if (comparisonType == int.class || comparisonType == long.class - || comparisonType == float.class || comparisonType == double.class) { - if (eq) methodWriter.ifCmp(type, MethodWriter.EQ, jump); - else if (ne) methodWriter.ifCmp(type, MethodWriter.NE, jump); - else if (lt) methodWriter.ifCmp(type, MethodWriter.LT, jump); - else if (lte) methodWriter.ifCmp(type, MethodWriter.LE, jump); - else if (gt) methodWriter.ifCmp(type, MethodWriter.GT, jump); - else if (gte) methodWriter.ifCmp(type, MethodWriter.GE, jump); - else { - throw new IllegalStateException("unexpected comparison operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + throw new IllegalStateException( + "unexpected comparison operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } + } else if (comparisonType == int.class 
+ || comparisonType == long.class + || comparisonType == float.class + || comparisonType == double.class) { + if (eq) methodWriter.ifCmp(type, MethodWriter.EQ, jump); + else if (ne) methodWriter.ifCmp(type, MethodWriter.NE, jump); + else if (lt) methodWriter.ifCmp(type, MethodWriter.LT, jump); + else if (lte) methodWriter.ifCmp(type, MethodWriter.LE, jump); + else if (gt) methodWriter.ifCmp(type, MethodWriter.GT, jump); + else if (gte) methodWriter.ifCmp(type, MethodWriter.GE, jump); + else { + throw new IllegalStateException( + "unexpected comparison operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); + } - } else if (comparisonType == def.class) { - Type booleanType = Type.getType(boolean.class); - Type descriptor = Type.getMethodType(booleanType, - MethodWriter.getType(getLeftNode().getExpressionType()), MethodWriter.getType(getRightNode().getExpressionType())); + } else if (comparisonType == def.class) { + Type booleanType = Type.getType(boolean.class); + Type descriptor = Type.getMethodType( + booleanType, + MethodWriter.getType(getLeftNode().getExpressionType()), + MethodWriter.getType(getRightNode().getExpressionType()) + ); - if (eq) { - if (getRightNode() instanceof NullNode) { - methodWriter.ifNull(jump); - } else if (getLeftNode() instanceof NullNode == false && operation == Operation.EQ) { - methodWriter.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + if (eq) { + if (getRightNode() instanceof NullNode) { + methodWriter.ifNull(jump); + } else if (getLeftNode() instanceof NullNode == false && operation == Operation.EQ) { + methodWriter.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + writejump = false; + } else { + methodWriter.ifCmp(type, MethodWriter.EQ, jump); + } + } else if (ne) { + if (getRightNode() instanceof NullNode) { + methodWriter.ifNonNull(jump); + } else if (getLeftNode() instanceof NullNode == false && operation == Operation.NE) { + methodWriter.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + methodWriter.ifZCmp(MethodWriter.EQ, jump); + } else { + methodWriter.ifCmp(type, MethodWriter.NE, jump); + } + } else if (lt) { + methodWriter.invokeDefCall("lt", descriptor, DefBootstrap.BINARY_OPERATOR, 0); + writejump = false; + } else if (lte) { + methodWriter.invokeDefCall("lte", descriptor, DefBootstrap.BINARY_OPERATOR, 0); + writejump = false; + } else if (gt) { + methodWriter.invokeDefCall("gt", descriptor, DefBootstrap.BINARY_OPERATOR, 0); + writejump = false; + } else if (gte) { + methodWriter.invokeDefCall("gte", descriptor, DefBootstrap.BINARY_OPERATOR, 0); writejump = false; } else { - methodWriter.ifCmp(type, MethodWriter.EQ, jump); - } - } else if (ne) { - if (getRightNode() instanceof NullNode) { - methodWriter.ifNonNull(jump); - } else if (getLeftNode() instanceof NullNode == false && operation == Operation.NE) { - methodWriter.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); - methodWriter.ifZCmp(MethodWriter.EQ, jump); - } else { - methodWriter.ifCmp(type, MethodWriter.NE, jump); - } - } else if (lt) { - methodWriter.invokeDefCall("lt", descriptor, DefBootstrap.BINARY_OPERATOR, 0); - writejump = false; - } else if (lte) { - methodWriter.invokeDefCall("lte", descriptor, DefBootstrap.BINARY_OPERATOR, 0); - writejump = false; - } else if (gt) { - methodWriter.invokeDefCall("gt", descriptor, 
DefBootstrap.BINARY_OPERATOR, 0); - writejump = false; - } else if (gte) { - methodWriter.invokeDefCall("gte", descriptor, DefBootstrap.BINARY_OPERATOR, 0); - writejump = false; - } else { - throw new IllegalStateException("unexpected comparison operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); - } - } else { - if (eq) { - if (getRightNode() instanceof NullNode) { - methodWriter.ifNull(jump); - } else if (operation == Operation.EQ) { - methodWriter.invokeStatic(OBJECTS_TYPE, EQUALS); - writejump = false; - } else { - methodWriter.ifCmp(type, MethodWriter.EQ, jump); - } - } else if (ne) { - if (getRightNode() instanceof NullNode) { - methodWriter.ifNonNull(jump); - } else if (operation == Operation.NE) { - methodWriter.invokeStatic(OBJECTS_TYPE, EQUALS); - methodWriter.ifZCmp(MethodWriter.EQ, jump); - } else { - methodWriter.ifCmp(type, MethodWriter.NE, jump); + throw new IllegalStateException( + "unexpected comparison operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } } else { - throw new IllegalStateException("unexpected comparison operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + if (eq) { + if (getRightNode() instanceof NullNode) { + methodWriter.ifNull(jump); + } else if (operation == Operation.EQ) { + methodWriter.invokeStatic(OBJECTS_TYPE, EQUALS); + writejump = false; + } else { + methodWriter.ifCmp(type, MethodWriter.EQ, jump); + } + } else if (ne) { + if (getRightNode() instanceof NullNode) { + methodWriter.ifNonNull(jump); + } else if (operation == Operation.NE) { + methodWriter.invokeStatic(OBJECTS_TYPE, EQUALS); + methodWriter.ifZCmp(MethodWriter.EQ, jump); + } else { + methodWriter.ifCmp(type, MethodWriter.NE, jump); + } + } else { + throw new IllegalStateException( + "unexpected comparison operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); + } } - } if (writejump) { methodWriter.push(false); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ConstantNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ConstantNode.java index ecef45fb252..0ac76a7a579 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ConstantNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ConstantNode.java @@ -72,15 +72,15 @@ public class ConstantNode extends ExpressionNode { @Override protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - if (constant instanceof String) methodWriter.push((String)constant); - else if (constant instanceof Double) methodWriter.push((double)constant); - else if (constant instanceof Float) methodWriter.push((float)constant); - else if (constant instanceof Long) methodWriter.push((long)constant); - else if (constant instanceof Integer) methodWriter.push((int)constant); - else if (constant instanceof Character) methodWriter.push((char)constant); - else if (constant instanceof Short) methodWriter.push((short)constant); - else if (constant instanceof Byte) methodWriter.push((byte)constant); - else if (constant instanceof Boolean) methodWriter.push((boolean)constant); + if (constant instanceof String) methodWriter.push((String) constant); + else if (constant instanceof Double) methodWriter.push((double) constant); + else if (constant instanceof Float) methodWriter.push((float) constant); + else if (constant instanceof Long) methodWriter.push((long) constant); + else if 
(constant instanceof Integer) methodWriter.push((int) constant); + else if (constant instanceof Character) methodWriter.push((char) constant); + else if (constant instanceof Short) methodWriter.push((short) constant); + else if (constant instanceof Byte) methodWriter.push((byte) constant); + else if (constant instanceof Boolean) methodWriter.push((boolean) constant); else { throw new IllegalStateException("unexpected constant [" + constant + "]"); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DeclarationNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DeclarationNode.java index 1ff4493ae0a..1a7a64d237a 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DeclarationNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DeclarationNode.java @@ -109,8 +109,12 @@ public class DeclarationNode extends StatementNode { if (expressionNode == null) { Class sort = variable.getType(); - if (sort == void.class || sort == boolean.class || sort == byte.class || - sort == short.class || sort == char.class || sort == int.class) { + if (sort == void.class + || sort == boolean.class + || sort == byte.class + || sort == short.class + || sort == char.class + || sort == int.class) { methodWriter.push(0); } else if (sort == long.class) { methodWriter.push(0L); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DefInterfaceReferenceNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DefInterfaceReferenceNode.java index 1ba04bf2d78..09de9dfd0a9 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DefInterfaceReferenceNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/DefInterfaceReferenceNode.java @@ -77,7 +77,7 @@ public class DefInterfaceReferenceNode extends ReferenceNode { // place holder for functional interface receiver // which is resolved and replace at runtime - methodWriter.push((String)null); + methodWriter.push((String) null); for (String capture : getCaptures()) { WriteScope.Variable variable = writeScope.getVariable(capture); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FieldNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FieldNode.java index 9603abd88ee..5da52faa3a6 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FieldNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FieldNode.java @@ -96,7 +96,8 @@ public class FieldNode extends IRNode { @Override protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - classWriter.getClassVisitor().visitField( - ClassWriter.buildAccess(modifiers, true), name, Type.getType(fieldType).getDescriptor(), null, null).visitEnd(); + classWriter.getClassVisitor() + .visitField(ClassWriter.buildAccess(modifiers, true), name, Type.getType(fieldType).getDescriptor(), null, null) + .visitEnd(); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FlipDefIndexNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FlipDefIndexNode.java index 5d13864578e..9bacc3cb73f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FlipDefIndexNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/FlipDefIndexNode.java @@ -66,9 +66,10 @@ public class FlipDefIndexNode extends UnaryNode { methodWriter.dup(); getChildNode().write(classWriter, 
methodWriter, writeScope); Type methodType = Type.getMethodType( - MethodWriter.getType(getChildNode().getExpressionType()), - MethodWriter.getType(def.class), - MethodWriter.getType(getChildNode().getExpressionType())); + MethodWriter.getType(getChildNode().getExpressionType()), + MethodWriter.getType(def.class), + MethodWriter.getType(getChildNode().getExpressionType()) + ); methodWriter.invokeDefCall("normalizeIndex", methodType, DefBootstrap.INDEX_NORMALIZE); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForEachSubIterableNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForEachSubIterableNode.java index 173279f558f..6290cc91240 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForEachSubIterableNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForEachSubIterableNode.java @@ -141,8 +141,10 @@ public class ForEachSubIterableNode extends LoopNode { getConditionNode().write(classWriter, methodWriter, writeScope); if (method == null) { - org.objectweb.asm.Type methodType = org.objectweb.asm.Type - .getMethodType(org.objectweb.asm.Type.getType(Iterator.class), org.objectweb.asm.Type.getType(Object.class)); + org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType( + org.objectweb.asm.Type.getType(Iterator.class), + org.objectweb.asm.Type.getType(Object.class) + ); methodWriter.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR); } else { methodWriter.invokeMethodCall(method); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForLoopNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForLoopNode.java index e25edd0ec1b..eb5de9a8b5f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForLoopNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ForLoopNode.java @@ -109,7 +109,7 @@ public class ForLoopNode extends LoopNode { if (initializerNode instanceof DeclarationBlockNode) { initializerNode.write(classWriter, methodWriter, writeScope); } else if (initializerNode instanceof ExpressionNode) { - ExpressionNode initializer = (ExpressionNode)this.initializerNode; + ExpressionNode initializer = (ExpressionNode) this.initializerNode; initializer.write(classWriter, methodWriter, writeScope); methodWriter.writePop(MethodWriter.getType(initializer.getExpressionType()).getSize()); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/IRNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/IRNode.java index c8c9fc0edd1..da36f0860d1 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/IRNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/IRNode.java @@ -51,6 +51,7 @@ public abstract class IRNode { /* ---- end node data, begin visitor ---- */ public abstract void visit(IRTreeVisitor irTreeVisitor, Scope scope); + public abstract void visitChildren(IRTreeVisitor irTreeVisitor, Scope scope); /* ---- end visitor ---- */ diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InstanceofNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InstanceofNode.java index 5ac042118c7..277268ee452 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InstanceofNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InstanceofNode.java @@ -85,8 +85,10 @@ public class InstanceofNode extends UnaryNode { 
methodWriter.push(true); } else if (getChildNode().getExpressionType().isPrimitive()) { methodWriter.writePop(MethodWriter.getType(getExpressionType()).getSize()); - methodWriter.push(PainlessLookupUtility.typeToBoxedType(instanceType).isAssignableFrom( - PainlessLookupUtility.typeToBoxedType(getChildNode().getExpressionType()))); + methodWriter.push( + PainlessLookupUtility.typeToBoxedType(instanceType) + .isAssignableFrom(PainlessLookupUtility.typeToBoxedType(getChildNode().getExpressionType())) + ); } else { methodWriter.instanceOf(MethodWriter.getType(PainlessLookupUtility.typeToBoxedType(instanceType))); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallDefNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallDefNode.java index 45ec8df371e..11f6c4835dc 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallDefNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallDefNode.java @@ -109,14 +109,14 @@ public class InvokeCallDefNode extends ArgumentsNode { // to hint at which values are the call's arguments // versus which values are captures if (argumentNode instanceof DefInterfaceReferenceNode) { - DefInterfaceReferenceNode defInterfaceReferenceNode = (DefInterfaceReferenceNode)argumentNode; + DefInterfaceReferenceNode defInterfaceReferenceNode = (DefInterfaceReferenceNode) argumentNode; boostrapArguments.add(defInterfaceReferenceNode.getDefReferenceEncoding()); // the encoding uses a char to indicate the number of captures // where the value is the number of current arguments plus the // total number of captures for easier capture count tracking // when resolved at runtime - char encoding = (char)(i + capturedCount); + char encoding = (char) (i + capturedCount); defCallRecipe.append(encoding); capturedCount += defInterfaceReferenceNode.getCaptures().size(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallMemberNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallMemberNode.java index 3ab7d87b9c0..6e1f9bd9900 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallMemberNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/InvokeCallMemberNode.java @@ -137,7 +137,7 @@ public class InvokeCallMemberNode extends ArgumentsNode { for (ExpressionNode argumentNode : getArgumentNodes()) { argumentNode.write(classWriter, methodWriter, writeScope); - } + } if (localFunction.isStatic()) { methodWriter.invokeStatic(CLASS_TYPE, localFunction.getAsmMethod()); @@ -147,10 +147,12 @@ public class InvokeCallMemberNode extends ArgumentsNode { } else if (importedMethod != null) { for (ExpressionNode argumentNode : getArgumentNodes()) { argumentNode.write(classWriter, methodWriter, writeScope); - } + } - methodWriter.invokeStatic(Type.getType(importedMethod.targetClass), - new Method(importedMethod.javaMethod.getName(), importedMethod.methodType.toMethodDescriptorString())); + methodWriter.invokeStatic( + Type.getType(importedMethod.targetClass), + new Method(importedMethod.javaMethod.getName(), importedMethod.methodType.toMethodDescriptorString()) + ); } else if (classBinding != null) { Type type = Type.getType(classBinding.javaConstructor.getDeclaringClass()); int javaConstructorParameterCount = classBinding.javaConstructor.getParameterCount() - classBindingOffset; @@ -170,7 +172,7 @@ public class InvokeCallMemberNode extends ArgumentsNode { for (int 
argument = 0; argument < javaConstructorParameterCount; ++argument) { getArgumentNodes().get(argument).write(classWriter, methodWriter, writeScope); - } + } methodWriter.invokeConstructor(type, Method.getMethod(classBinding.javaConstructor)); methodWriter.putField(CLASS_TYPE, bindingName, type); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ListInitializationNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ListInitializationNode.java index 5f38872edcc..2fc7cba18aa 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ListInitializationNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/ListInitializationNode.java @@ -92,7 +92,9 @@ public class ListInitializationNode extends ArgumentsNode { methodWriter.newInstance(MethodWriter.getType(getExpressionType())); methodWriter.dup(); methodWriter.invokeConstructor( - Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); + Type.getType(constructor.javaConstructor.getDeclaringClass()), + Method.getMethod(constructor.javaConstructor) + ); for (ExpressionNode argument : getArgumentNodes()) { methodWriter.dup(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadBraceDefNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadBraceDefNode.java index 3e5d463088a..24723514361 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadBraceDefNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadBraceDefNode.java @@ -80,9 +80,10 @@ public class LoadBraceDefNode extends ExpressionNode { protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { methodWriter.writeDebugInfo(getLocation()); Type methodType = Type.getMethodType( - MethodWriter.getType(getExpressionType()), - MethodWriter.getType(def.class), - MethodWriter.getType(indexType)); + MethodWriter.getType(getExpressionType()), + MethodWriter.getType(def.class), + MethodWriter.getType(indexType) + ); methodWriter.invokeDefCall("arrayLoad", methodType, DefBootstrap.ARRAY_LOAD); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotDefNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotDefNode.java index 4f00ecdd99a..b8696356d6b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotDefNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotDefNode.java @@ -76,9 +76,7 @@ public class LoadDotDefNode extends ExpressionNode { @Override protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { methodWriter.writeDebugInfo(getLocation()); - Type methodType = Type.getMethodType( - MethodWriter.getType(getExpressionType()), - MethodWriter.getType(def.class)); + Type methodType = Type.getMethodType(MethodWriter.getType(getExpressionType()), MethodWriter.getType(def.class)); methodWriter.invokeDefCall(value, methodType, DefBootstrap.LOAD); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotNode.java index 175075bcee6..1f4a9e4c363 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/LoadDotNode.java @@ -77,11 +77,17 @@ public class 
LoadDotNode extends ExpressionNode { methodWriter.writeDebugInfo(getLocation()); if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { - methodWriter.getStatic(Type.getType( - field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); + methodWriter.getStatic( + Type.getType(field.javaField.getDeclaringClass()), + field.javaField.getName(), + MethodWriter.getType(field.typeParameter) + ); } else { - methodWriter.getField(Type.getType( - field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); + methodWriter.getField( + Type.getType(field.javaField.getDeclaringClass()), + field.javaField.getName(), + MethodWriter.getType(field.typeParameter) + ); } } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/MapInitializationNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/MapInitializationNode.java index 00a6f75d73e..88fa949f7cf 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/MapInitializationNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/MapInitializationNode.java @@ -129,7 +129,9 @@ public class MapInitializationNode extends ExpressionNode { methodWriter.newInstance(MethodWriter.getType(getExpressionType())); methodWriter.dup(); methodWriter.invokeConstructor( - Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); + Type.getType(constructor.javaConstructor.getDeclaringClass()), + Method.getMethod(constructor.javaConstructor) + ); for (int index = 0; index < getArgumentsSize(); ++index) { methodWriter.dup(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/NewObjectNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/NewObjectNode.java index 9bcdebf6e04..944d67314d5 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/NewObjectNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/NewObjectNode.java @@ -99,6 +99,8 @@ public class NewObjectNode extends ArgumentsNode { } methodWriter.invokeConstructor( - Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); + Type.getType(constructor.javaConstructor.getDeclaringClass()), + Method.getMethod(constructor.javaConstructor) + ); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreBraceDefNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreBraceDefNode.java index 16e94b443f8..f54e4f2d0ec 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreBraceDefNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreBraceDefNode.java @@ -84,10 +84,11 @@ public class StoreBraceDefNode extends StoreNode { methodWriter.writeDebugInfo(getLocation()); Type methodType = Type.getMethodType( - MethodWriter.getType(void.class), - MethodWriter.getType(def.class), - MethodWriter.getType(indexType), - MethodWriter.getType(getStoreType())); + MethodWriter.getType(void.class), + MethodWriter.getType(def.class), + MethodWriter.getType(indexType), + MethodWriter.getType(getStoreType()) + ); methodWriter.invokeDefCall("arrayStore", methodType, DefBootstrap.ARRAY_STORE); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotDefNode.java 
b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotDefNode.java index 1e8e67a63be..e7761f36e91 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotDefNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotDefNode.java @@ -79,9 +79,10 @@ public class StoreDotDefNode extends StoreNode { methodWriter.writeDebugInfo(getLocation()); Type methodType = Type.getMethodType( - MethodWriter.getType(void.class), - MethodWriter.getType(def.class), - MethodWriter.getType(getStoreType())); + MethodWriter.getType(void.class), + MethodWriter.getType(def.class), + MethodWriter.getType(getStoreType()) + ); methodWriter.invokeDefCall(value, methodType, DefBootstrap.STORE); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotNode.java index ead6e126c5e..39a776b0e44 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/StoreDotNode.java @@ -79,11 +79,17 @@ public class StoreDotNode extends StoreNode { methodWriter.writeDebugInfo(getLocation()); if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { - methodWriter.putStatic(Type.getType( - field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); + methodWriter.putStatic( + Type.getType(field.javaField.getDeclaringClass()), + field.javaField.getName(), + MethodWriter.getType(field.typeParameter) + ); } else { - methodWriter.putField(Type.getType( - field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter)); + methodWriter.putField( + Type.getType(field.javaField.getDeclaringClass()), + field.javaField.getName(), + MethodWriter.getType(field.typeParameter) + ); } } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/UnaryMathNode.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/UnaryMathNode.java index 6885b6025d9..0667565bf17 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ir/UnaryMathNode.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ir/UnaryMathNode.java @@ -149,8 +149,9 @@ public class UnaryMathNode extends UnaryNode { } else if (getUnaryType() == long.class) { methodWriter.push(-1L); } else { - throw new IllegalStateException("unexpected unary math operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + throw new IllegalStateException( + "unexpected unary math operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } methodWriter.math(MethodWriter.XOR, actualType); @@ -168,8 +169,9 @@ public class UnaryMathNode extends UnaryNode { methodWriter.invokeDefCall("plus", descriptor, DefBootstrap.UNARY_OPERATOR, defFlags); } } else { - throw new IllegalStateException("unexpected unary math operation [" + operation + "] " + - "for type [" + getExpressionCanonicalTypeName() + "]"); + throw new IllegalStateException( + "unexpected unary math operation [" + operation + "] " + "for type [" + getExpressionCanonicalTypeName() + "]" + ); } } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessCast.java index 93b3a9d52cc..52de0480d53 100644 --- 
a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessCast.java @@ -46,7 +46,11 @@ public class PainlessCast { /** Create a cast where the original type will be unboxed, and then the cast will be performed. */ public static PainlessCast unboxOriginalType( - Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + Class originalType, + Class targetType, + boolean explicitCast, + Class unboxOriginalType + ) { Objects.requireNonNull(originalType); Objects.requireNonNull(targetType); @@ -56,8 +60,7 @@ public class PainlessCast { } /** Create a cast where the target type will be unboxed, and then the cast will be performed. */ - public static PainlessCast unboxTargetType( - Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + public static PainlessCast unboxTargetType(Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { Objects.requireNonNull(originalType); Objects.requireNonNull(targetType); @@ -67,8 +70,7 @@ public class PainlessCast { } /** Create a cast where the original type will be boxed, and then the cast will be performed. */ - public static PainlessCast boxOriginalType( - Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + public static PainlessCast boxOriginalType(Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { Objects.requireNonNull(originalType); Objects.requireNonNull(targetType); @@ -78,8 +80,7 @@ public class PainlessCast { } /** Create a cast where the target type will be boxed, and then the cast will be performed. */ - public static PainlessCast boxTargetType( - Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + public static PainlessCast boxTargetType(Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { Objects.requireNonNull(originalType); Objects.requireNonNull(targetType); @@ -105,8 +106,15 @@ public class PainlessCast { public final Class boxOriginalType; public final Class boxTargetType; - private PainlessCast(Class originalType, Class targetType, boolean explicitCast, - Class unboxOriginalType, Class unboxTargetType, Class boxOriginalType, Class boxTargetType) { + private PainlessCast( + Class originalType, + Class targetType, + boolean explicitCast, + Class unboxOriginalType, + Class unboxTargetType, + Class boxOriginalType, + Class boxTargetType + ) { this.originalType = originalType; this.targetType = targetType; @@ -127,15 +135,15 @@ public class PainlessCast { return false; } - PainlessCast that = (PainlessCast)object; + PainlessCast that = (PainlessCast) object; - return explicitCast == that.explicitCast && - Objects.equals(originalType, that.originalType) && - Objects.equals(targetType, that.targetType) && - Objects.equals(unboxOriginalType, that.unboxOriginalType) && - Objects.equals(unboxTargetType, that.unboxTargetType) && - Objects.equals(boxOriginalType, that.boxOriginalType) && - Objects.equals(boxTargetType, that.boxTargetType); + return explicitCast == that.explicitCast + && Objects.equals(originalType, that.originalType) + && Objects.equals(targetType, that.targetType) + && Objects.equals(unboxOriginalType, that.unboxOriginalType) + && Objects.equals(unboxTargetType, that.unboxTargetType) + && Objects.equals(boxOriginalType, that.boxOriginalType) + && Objects.equals(boxTargetType, that.boxTargetType); } @Override 
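The hunks above and below this point are mechanical formatting changes rather than behavioural ones, presumably produced by an automated formatter: long parameter and argument lists are wrapped one entry per line with the closing parenthesis on its own line, multi-line boolean chains and string concatenations move the operator to the start of each continuation line, and casts gain a space, as in (PainlessCast) object. A minimal sketch of the resulting shape, using a hypothetical Example class rather than any actual Painless type, looks like this (illustration only, not part of the patch):

// Illustration only: a hypothetical class showing the wrapping convention
// applied throughout this patch; Example is not a Painless source file.
import java.util.Objects;

final class Example {

    private final String name;
    private final int arity;

    Example(
        String name,   // one argument per line once the signature exceeds the line limit
        int arity      // closing parenthesis dropped onto its own line
    ) {
        this.name = Objects.requireNonNull(name);
        this.arity = arity;
    }

    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (object == null || getClass() != object.getClass()) {
            return false;
        }
        Example that = (Example) object;   // space after the cast
        // boolean chains wrap with the operator leading the continuation line
        return arity == that.arity
            && Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, arity);
    }
}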
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java index bb8d9f45ddc..efa2d515245 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java @@ -51,12 +51,17 @@ public final class PainlessClass { public final Map getterMethodHandles; public final Map setterMethodHandles; - PainlessClass(Map constructors, - Map staticMethods, Map methods, - Map staticFields, Map fields, - PainlessMethod functionalInterfaceMethod, - Map runtimeMethods, - Map getterMethodHandles, Map setterMethodHandles) { + PainlessClass( + Map constructors, + Map staticMethods, + Map methods, + Map staticFields, + Map fields, + PainlessMethod functionalInterfaceMethod, + Map runtimeMethods, + Map getterMethodHandles, + Map setterMethodHandles + ) { this.constructors = CollectionUtils.copyMap(constructors); this.staticMethods = CollectionUtils.copyMap(staticMethods); @@ -80,14 +85,14 @@ public final class PainlessClass { return false; } - PainlessClass that = (PainlessClass)object; + PainlessClass that = (PainlessClass) object; - return Objects.equals(constructors, that.constructors) && - Objects.equals(staticMethods, that.staticMethods) && - Objects.equals(methods, that.methods) && - Objects.equals(staticFields, that.staticFields) && - Objects.equals(fields, that.fields) && - Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + return Objects.equals(constructors, that.constructors) + && Objects.equals(staticMethods, that.staticMethods) + && Objects.equals(methods, that.methods) + && Objects.equals(staticFields, that.staticFields) + && Objects.equals(fields, that.fields) + && Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBinding.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBinding.java index 51158557af8..37bf4140ade 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBinding.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBinding.java @@ -47,8 +47,13 @@ public class PainlessClassBinding { public final List> typeParameters; public final Map, Object> annotations; - PainlessClassBinding(Constructor javaConstructor, Method javaMethod, Class returnType, List> typeParameters, - Map, Object> annotations) { + PainlessClassBinding( + Constructor javaConstructor, + Method javaMethod, + Class returnType, + List> typeParameters, + Map, Object> annotations + ) { this.javaConstructor = javaConstructor; this.javaMethod = javaMethod; @@ -67,12 +72,12 @@ public class PainlessClassBinding { return false; } - PainlessClassBinding that = (PainlessClassBinding)object; + PainlessClassBinding that = (PainlessClassBinding) object; - return Objects.equals(javaConstructor, that.javaConstructor) && - Objects.equals(javaMethod, that.javaMethod) && - Objects.equals(returnType, that.returnType) && - Objects.equals(typeParameters, that.typeParameters); + return Objects.equals(javaConstructor, that.javaConstructor) + && Objects.equals(javaMethod, that.javaMethod) + && Objects.equals(returnType, that.returnType) + && Objects.equals(typeParameters, that.typeParameters); } @Override diff --git 
a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBuilder.java index 84b698a74d5..c313e562bec 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClassBuilder.java @@ -64,8 +64,17 @@ final class PainlessClassBuilder { } PainlessClass build() { - return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod, - runtimeMethods, getterMethodHandles, setterMethodHandles); + return new PainlessClass( + constructors, + staticMethods, + methods, + staticFields, + fields, + functionalInterfaceMethod, + runtimeMethods, + getterMethodHandles, + setterMethodHandles + ); } @Override @@ -78,14 +87,14 @@ final class PainlessClassBuilder { return false; } - PainlessClassBuilder that = (PainlessClassBuilder)object; + PainlessClassBuilder that = (PainlessClassBuilder) object; - return Objects.equals(constructors, that.constructors) && - Objects.equals(staticMethods, that.staticMethods) && - Objects.equals(methods, that.methods) && - Objects.equals(staticFields, that.staticFields) && - Objects.equals(fields, that.fields) && - Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + return Objects.equals(constructors, that.constructors) + && Objects.equals(staticMethods, that.staticMethods) + && Objects.equals(methods, that.methods) + && Objects.equals(staticFields, that.staticFields) + && Objects.equals(fields, that.fields) + && Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessConstructor.java index d4b24df8bd8..c067fd01d53 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessConstructor.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessConstructor.java @@ -47,8 +47,13 @@ public class PainlessConstructor { public final MethodType methodType; public final Map, Object> annotations; - PainlessConstructor(Constructor javaConstructor, List> typeParameters, MethodHandle methodHandle, MethodType methodType, - Map, Object> annotations) { + PainlessConstructor( + Constructor javaConstructor, + List> typeParameters, + MethodHandle methodHandle, + MethodType methodType, + Map, Object> annotations + ) { this.javaConstructor = javaConstructor; this.typeParameters = typeParameters; this.methodHandle = methodHandle; @@ -66,12 +71,12 @@ public class PainlessConstructor { return false; } - PainlessConstructor that = (PainlessConstructor)object; + PainlessConstructor that = (PainlessConstructor) object; - return Objects.equals(javaConstructor, that.javaConstructor) && - Objects.equals(typeParameters, that.typeParameters) && - Objects.equals(methodType, that.methodType) && - Objects.equals(annotations, that.annotations); + return Objects.equals(javaConstructor, that.javaConstructor) + && Objects.equals(typeParameters, that.typeParameters) + && Objects.equals(methodType, that.methodType) + && Objects.equals(annotations, that.annotations); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessField.java 
b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessField.java index 8be15ae0ac3..30f719358a0 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessField.java @@ -62,10 +62,9 @@ public final class PainlessField { return false; } - PainlessField that = (PainlessField)object; + PainlessField that = (PainlessField) object; - return Objects.equals(javaField, that.javaField) && - Objects.equals(typeParameter, that.typeParameter); + return Objects.equals(javaField, that.javaField) && Objects.equals(typeParameter, that.typeParameter); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessInstanceBinding.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessInstanceBinding.java index 7885fb5516b..d1ee49f8960 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessInstanceBinding.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessInstanceBinding.java @@ -62,12 +62,12 @@ public class PainlessInstanceBinding { return false; } - PainlessInstanceBinding that = (PainlessInstanceBinding)object; + PainlessInstanceBinding that = (PainlessInstanceBinding) object; - return targetInstance == that.targetInstance && - Objects.equals(javaMethod, that.javaMethod) && - Objects.equals(returnType, that.returnType) && - Objects.equals(typeParameters, that.typeParameters); + return targetInstance == that.targetInstance + && Objects.equals(javaMethod, that.javaMethod) + && Objects.equals(returnType, that.returnType) + && Objects.equals(typeParameters, that.typeParameters); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java index c5f92c0ed75..1249a9cffec 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java @@ -57,12 +57,13 @@ public final class PainlessLookup { private final Map painlessMethodKeysToPainlessInstanceBindings; PainlessLookup( - Map> javaClassNamesToClasses, - Map> canonicalClassNamesToClasses, - Map, PainlessClass> classesToPainlessClasses, - Map painlessMethodKeysToImportedPainlessMethods, - Map painlessMethodKeysToPainlessClassBindings, - Map painlessMethodKeysToPainlessInstanceBindings) { + Map> javaClassNamesToClasses, + Map> canonicalClassNamesToClasses, + Map, PainlessClass> classesToPainlessClasses, + Map painlessMethodKeysToImportedPainlessMethods, + Map painlessMethodKeysToPainlessClassBindings, + Map painlessMethodKeysToPainlessInstanceBindings + ) { Objects.requireNonNull(javaClassNamesToClasses); Objects.requireNonNull(canonicalClassNamesToClasses); @@ -175,9 +176,7 @@ public final class PainlessLookup { return null; } - return isStatic ? - targetPainlessClass.staticMethods.get(painlessMethodKey) : - targetPainlessClass.methods.get(painlessMethodKey); + return isStatic ? targetPainlessClass.staticMethods.get(painlessMethodKey) : targetPainlessClass.methods.get(painlessMethodKey); } public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) { @@ -203,9 +202,9 @@ public final class PainlessLookup { return null; } - PainlessField painlessField = isStatic ? 
- targetPainlessClass.staticFields.get(painlessFieldKey) : - targetPainlessClass.fields.get(painlessFieldKey); + PainlessField painlessField = isStatic + ? targetPainlessClass.staticFields.get(painlessFieldKey) + : targetPainlessClass.fields.get(painlessFieldKey); if (painlessField == null) { return null; @@ -253,8 +252,9 @@ public final class PainlessLookup { Objects.requireNonNull(methodName); String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); - Function objectLookup = - targetPainlessClass -> targetPainlessClass.runtimeMethods.get(painlessMethodKey); + Function objectLookup = targetPainlessClass -> targetPainlessClass.runtimeMethods.get( + painlessMethodKey + ); return lookupRuntimePainlessObject(originalTargetClass, objectLookup); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java index ede300019fc..dd8e253db47 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java @@ -107,20 +107,20 @@ public final class PainlessLookupBuilder { private static final CodeSource CODESOURCE; - private static final Map painlessConstructorCache = new HashMap<>(); - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); - private static final Map painlessClassBindingCache = new HashMap<>(); + private static final Map painlessConstructorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessClassBindingCache = new HashMap<>(); private static final Map painlessInstanceBindingCache = new HashMap<>(); - private static final Map painlessBridgeCache = new HashMap<>(); + private static final Map painlessBridgeCache = new HashMap<>(); - private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); + private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); - private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); + private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); static { try { - CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[])null); + CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); } catch (MalformedURLException mue) { throw new RuntimeException(mue); } @@ -135,8 +135,10 @@ public final class PainlessLookupBuilder { for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { origin = whitelistClass.origin; painlessLookupBuilder.addPainlessClass( - whitelist.classLoader, whitelistClass.javaClassName, - whitelistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false); + whitelist.classLoader, + whitelistClass.javaClassName, + whitelistClass.painlessAnnotations.containsKey(NoImportAnnotation.class) == false + ); } } @@ -147,47 +149,67 @@ public final class PainlessLookupBuilder { for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) { origin = whitelistConstructor.origin; 
painlessLookupBuilder.addPainlessConstructor( - targetCanonicalClassName, whitelistConstructor.canonicalTypeNameParameters, - whitelistConstructor.painlessAnnotations); + targetCanonicalClassName, + whitelistConstructor.canonicalTypeNameParameters, + whitelistConstructor.painlessAnnotations + ); } for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { origin = whitelistMethod.origin; painlessLookupBuilder.addPainlessMethod( - whitelist.classLoader, targetCanonicalClassName, whitelistMethod.augmentedCanonicalClassName, - whitelistMethod.methodName, whitelistMethod.returnCanonicalTypeName, - whitelistMethod.canonicalTypeNameParameters, whitelistMethod.painlessAnnotations); + whitelist.classLoader, + targetCanonicalClassName, + whitelistMethod.augmentedCanonicalClassName, + whitelistMethod.methodName, + whitelistMethod.returnCanonicalTypeName, + whitelistMethod.canonicalTypeNameParameters, + whitelistMethod.painlessAnnotations + ); } for (WhitelistField whitelistField : whitelistClass.whitelistFields) { origin = whitelistField.origin; painlessLookupBuilder.addPainlessField( - targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter); + targetCanonicalClassName, + whitelistField.fieldName, + whitelistField.canonicalTypeNameParameter + ); } } for (WhitelistMethod whitelistStatic : whitelist.whitelistImportedMethods) { origin = whitelistStatic.origin; painlessLookupBuilder.addImportedPainlessMethod( - whitelist.classLoader, whitelistStatic.augmentedCanonicalClassName, - whitelistStatic.methodName, whitelistStatic.returnCanonicalTypeName, - whitelistStatic.canonicalTypeNameParameters, - whitelistStatic.painlessAnnotations); + whitelist.classLoader, + whitelistStatic.augmentedCanonicalClassName, + whitelistStatic.methodName, + whitelistStatic.returnCanonicalTypeName, + whitelistStatic.canonicalTypeNameParameters, + whitelistStatic.painlessAnnotations + ); } for (WhitelistClassBinding whitelistClassBinding : whitelist.whitelistClassBindings) { origin = whitelistClassBinding.origin; painlessLookupBuilder.addPainlessClassBinding( - whitelist.classLoader, whitelistClassBinding.targetJavaClassName, whitelistClassBinding.methodName, - whitelistClassBinding.returnCanonicalTypeName, whitelistClassBinding.canonicalTypeNameParameters, - whitelistClassBinding.painlessAnnotations); + whitelist.classLoader, + whitelistClassBinding.targetJavaClassName, + whitelistClassBinding.methodName, + whitelistClassBinding.returnCanonicalTypeName, + whitelistClassBinding.canonicalTypeNameParameters, + whitelistClassBinding.painlessAnnotations + ); } for (WhitelistInstanceBinding whitelistInstanceBinding : whitelist.whitelistInstanceBindings) { origin = whitelistInstanceBinding.origin; painlessLookupBuilder.addPainlessInstanceBinding( - whitelistInstanceBinding.targetInstance, whitelistInstanceBinding.methodName, - whitelistInstanceBinding.returnCanonicalTypeName, whitelistInstanceBinding.canonicalTypeNameParameters); + whitelistInstanceBinding.targetInstance, + whitelistInstanceBinding.methodName, + whitelistInstanceBinding.returnCanonicalTypeName, + whitelistInstanceBinding.canonicalTypeNameParameters + ); } } } catch (Exception exception) { @@ -241,15 +263,15 @@ public final class PainlessLookupBuilder { Class clazz; - if ("void".equals(javaClassName)) clazz = void.class; + if ("void".equals(javaClassName)) clazz = void.class; else if ("boolean".equals(javaClassName)) clazz = boolean.class; - else if ("byte".equals(javaClassName)) clazz = byte.class; - else if 
("short".equals(javaClassName)) clazz = short.class; - else if ("char".equals(javaClassName)) clazz = char.class; - else if ("int".equals(javaClassName)) clazz = int.class; - else if ("long".equals(javaClassName)) clazz = long.class; - else if ("float".equals(javaClassName)) clazz = float.class; - else if ("double".equals(javaClassName)) clazz = double.class; + else if ("byte".equals(javaClassName)) clazz = byte.class; + else if ("short".equals(javaClassName)) clazz = short.class; + else if ("char".equals(javaClassName)) clazz = char.class; + else if ("int".equals(javaClassName)) clazz = int.class; + else if ("long".equals(javaClassName)) clazz = long.class; + else if ("float".equals(javaClassName)) clazz = float.class; + else if ("double".equals(javaClassName)) clazz = double.class; else { try { clazz = Class.forName(javaClassName, true, classLoader); @@ -263,7 +285,7 @@ public final class PainlessLookupBuilder { public void addPainlessClass(Class clazz, boolean importClassName) { Objects.requireNonNull(clazz); - //Matcher m = new Matcher(); + // Matcher m = new Matcher(); if (clazz == def.class) { throw new IllegalArgumentException("cannot add reserved class [" + DEF_CLASS_NAME + "]"); @@ -284,15 +306,23 @@ public final class PainlessLookupBuilder { if (existingClass == null) { javaClassNamesToClasses.put(clazz.getName().intern(), clazz); } else if (existingClass != clazz) { - throw new IllegalArgumentException("class [" + canonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); + throw new IllegalArgumentException( + "class [" + + canonicalClassName + + "] " + + "cannot represent multiple java classes with the same name from different class loaders" + ); } existingClass = canonicalClassNamesToClasses.get(canonicalClassName); if (existingClass != null && existingClass != clazz) { - throw new IllegalArgumentException("class [" + canonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); + throw new IllegalArgumentException( + "class [" + + canonicalClassName + + "] " + + "cannot represent multiple java classes with the same name from different class loaders" + ); } PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz); @@ -317,31 +347,49 @@ public final class PainlessLookupBuilder { if (importedClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { - throw new IllegalArgumentException( - "inconsistent no_import parameter found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName.intern(), clazz); } } else if (importedClass != clazz) { - throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + - "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedClass) + "]"); + throw new IllegalArgumentException( + "imported class [" + + importedCanonicalClassName + + "] cannot represent multiple " + + "classes [" + + canonicalClassName + + "] and [" + + typeToCanonicalTypeName(importedClass) + + "]" + ); } else if (importClassName == false) { throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } } } - public void addPainlessConstructor(String targetCanonicalClassName, List 
canonicalTypeNameParameters, - Map, Object> annotations) { + public void addPainlessConstructor( + String targetCanonicalClassName, + List canonicalTypeNameParameters, + Map, Object> annotations + ) { Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(canonicalTypeNameParameters); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + - "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found" + + "for constructor [[" + + targetCanonicalClassName + + "], " + + canonicalTypeNameParameters + + "]" + ); } List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); @@ -350,8 +398,16 @@ public final class PainlessLookupBuilder { Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) { - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found " + + "for constructor [[" + + targetCanonicalClassName + + "], " + + canonicalTypeNameParameters + + "]" + ); } typeParameters.add(typeParameter); @@ -372,8 +428,16 @@ public final class PainlessLookupBuilder { PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); if (painlessClassBuilder == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + - "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found" + + "for constructor [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } int typeParametersSize = typeParameters.size(); @@ -381,8 +445,16 @@ public final class PainlessLookupBuilder { for (Class typeParameter : typeParameters) { if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for constructor [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } javaTypeParameters.add(typeToJavaType(typeParameter)); @@ -393,8 +465,15 @@ public final class PainlessLookupBuilder { try { javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("reflection object not found for constructor " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); + throw new IllegalArgumentException( + "reflection object not found for constructor " + + "[[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]", + nsme + ); } MethodHandle methodHandle; @@ -402,30 +481,58 @@ public final class 
PainlessLookupBuilder { try { methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle not found for constructor " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + throw new IllegalArgumentException( + "method handle not found for constructor " + + "[[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]", + iae + ); } MethodType methodType = methodHandle.type(); String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); - PainlessConstructor newPainlessConstructor = new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType, - annotations); + PainlessConstructor newPainlessConstructor = new PainlessConstructor( + javaConstructor, + typeParameters, + methodHandle, + methodType, + annotations + ); if (existingPainlessConstructor == null) { newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, key -> key); painlessClassBuilder.constructors.put(painlessConstructorKey.intern(), newPainlessConstructor); - } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false){ - throw new IllegalArgumentException("cannot add constructors with the same arity but are not equivalent for constructors " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + "]"); + } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false) { + throw new IllegalArgumentException( + "cannot add constructors with the same arity but are not equivalent for constructors " + + "[[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] and " + + "[[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + + "]" + ); } } - public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, - String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - Map, Object> annotations) { + public void addPainlessMethod( + ClassLoader classLoader, + String targetCanonicalClassName, + String augmentedCanonicalClassName, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + Map, Object> annotations + ) { Objects.requireNonNull(classLoader); Objects.requireNonNull(targetCanonicalClassName); @@ -436,8 +543,18 @@ public final class PainlessLookupBuilder { Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } Class augmentedClass = null; @@ -446,8 +563,19 @@ public final class PainlessLookupBuilder { try { augmentedClass = 
Class.forName(augmentedCanonicalClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]", cnfe); + throw new IllegalArgumentException( + "augmented class [" + + augmentedCanonicalClassName + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]", + cnfe + ); } } @@ -457,8 +585,18 @@ public final class PainlessLookupBuilder { Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) { - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } typeParameters.add(typeParameter); @@ -467,15 +605,31 @@ public final class PainlessLookupBuilder { Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); if (returnType == null) { - throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "return type [" + + returnCanonicalTypeName + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters, annotations); } - public void addPainlessMethod(Class targetClass, Class augmentedClass, - String methodName, Class returnType, List> typeParameters, Map, Object> annotations) { + public void addPainlessMethod( + Class targetClass, + Class augmentedClass, + String methodName, + Class returnType, + List> typeParameters, + Map, Object> annotations + ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); @@ -490,14 +644,25 @@ public final class PainlessLookupBuilder { if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( - "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."); + "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]." 
+ ); } PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); if (painlessClassBuilder == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } int typeParametersSize = typeParameters.size(); @@ -510,17 +675,36 @@ public final class PainlessLookupBuilder { for (Class typeParameter : typeParameters) { if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "not found for method [[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } javaTypeParameters.add(typeToJavaType(typeParameter)); } if (isValidType(returnType) == false) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(returnType) + + "] not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } Method javaMethod; @@ -529,28 +713,57 @@ public final class PainlessLookupBuilder { try { javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("reflection object not found for method [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); + throw new IllegalArgumentException( + "reflection object not found for method [[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]", + nsme + ); } } else { try { javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); if (Modifier.isStatic(javaMethod.getModifiers()) == false) { - throw new IllegalArgumentException("method [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "] with augmented class " + - "[" + typeToCanonicalTypeName(augmentedClass) + "] must be static"); + throw new IllegalArgumentException( + "method [[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] with augmented class " + + "[" + + typeToCanonicalTypeName(augmentedClass) + + "] must be static" + ); } } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("reflection object not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] " + - "with augmented 
class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); + throw new IllegalArgumentException( + "reflection object not found for method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] " + + "with augmented class [" + + typeToCanonicalTypeName(augmentedClass) + + "]", + nsme + ); } } // injections alter the type parameters required for the user to call this method, since some are injected by compiler if (annotations.containsKey(InjectConstantAnnotation.class)) { - int numInjections = ((InjectConstantAnnotation)annotations.get(InjectConstantAnnotation.class)).injects.size(); + int numInjections = ((InjectConstantAnnotation) annotations.get(InjectConstantAnnotation.class)).injects.size(); if (numInjections > 0) { typeParameters.subList(0, numInjections).clear(); @@ -560,10 +773,21 @@ public final class PainlessLookupBuilder { } if (javaMethod.getReturnType() != typeToJavaType(returnType)) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + - "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + - "for method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(javaMethod.getReturnType()) + + "] " + + "does not match the specified returned type [" + + typeToCanonicalTypeName(returnType) + + "] " + + "for method [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } MethodHandle methodHandle; @@ -572,29 +796,54 @@ public final class PainlessLookupBuilder { try { methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle not found for method " + - "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]", iae); + throw new IllegalArgumentException( + "method handle not found for method " + + "[[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]", + iae + ); } } else { try { methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle not found for method " + - "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]" + - "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); + throw new IllegalArgumentException( + "method handle not found for method " + + "[[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + + "with augmented class [" + + typeToCanonicalTypeName(augmentedClass) + + "]", + iae + ); } } MethodType methodType = methodHandle.type(); boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers()); String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); - PainlessMethod existingPainlessMethod = isStatic ? 
- painlessClassBuilder.staticMethods.get(painlessMethodKey) : - painlessClassBuilder.methods.get(painlessMethodKey); - PainlessMethod newPainlessMethod = - new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType, annotations); + PainlessMethod existingPainlessMethod = isStatic + ? painlessClassBuilder.staticMethods.get(painlessMethodKey) + : painlessClassBuilder.methods.get(painlessMethodKey); + PainlessMethod newPainlessMethod = new PainlessMethod( + javaMethod, + targetClass, + returnType, + typeParameters, + methodHandle, + methodType, + annotations + ); if (existingPainlessMethod == null) { newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key); @@ -605,13 +854,29 @@ public final class PainlessLookupBuilder { painlessClassBuilder.methods.put(painlessMethodKey.intern(), newPainlessMethod); } } else if (newPainlessMethod.equals(existingPainlessMethod) == false) { - throw new IllegalArgumentException("cannot add methods with the same name and arity but are not equivalent for methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(existingPainlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + "]"); + throw new IllegalArgumentException( + "cannot add methods with the same name and arity but are not equivalent for methods " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(returnType) + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] and " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(existingPainlessMethod.returnType) + + "], " + + typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + + "]" + ); } } @@ -623,18 +888,35 @@ public final class PainlessLookupBuilder { Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + - "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + canonicalTypeNameParameter + "]]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found for field " + + "[[" + + targetCanonicalClassName + + "], [" + + fieldName + + "], [" + + canonicalTypeNameParameter + + "]]" + ); } Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) { - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + - "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found " + + "for field [[" + + targetCanonicalClassName + + "], [" + + fieldName + + "]" + ); } - addPainlessField(targetClass, fieldName, typeParameter); } @@ -651,20 +933,38 @@ public final class PainlessLookupBuilder { if (FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) { throw new IllegalArgumentException( - "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]."); + "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]." 
+ ); } - PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); if (painlessClassBuilder == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + - "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "]]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found for field " + + "[[" + + targetCanonicalClassName + + "], [" + + fieldName + + "], [" + + typeToCanonicalTypeName(typeParameter) + + "]]" + ); } if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for field [[" + + targetCanonicalClassName + + "], [" + + fieldName + + "]" + ); } Field javaField; @@ -673,13 +973,25 @@ public final class PainlessLookupBuilder { javaField = targetClass.getField(fieldName); } catch (NoSuchFieldException nsme) { throw new IllegalArgumentException( - "reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", nsme); + "reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", + nsme + ); } if (javaField.getType() != typeToJavaType(typeParameter)) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaField.getType()) + "] " + - "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(javaField.getType()) + + "] " + + "does not match the specified type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "for field [[" + + targetCanonicalClassName + + "], [" + + fieldName + + "]" + ); } MethodHandle methodHandleGetter; @@ -688,7 +1000,8 @@ public final class PainlessLookupBuilder { methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); } catch (IllegalAccessException iae) { throw new IllegalArgumentException( - "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]" + ); } String painlessFieldKey = buildPainlessFieldKey(fieldName); @@ -705,12 +1018,24 @@ public final class PainlessLookupBuilder { newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); painlessClassBuilder.staticFields.put(painlessFieldKey.intern(), newPainlessField); } else if (newPainlessField.equals(existingPainlessField) == false) { - throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + - "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + - typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + - "with the same name and different type parameters"); + throw new IllegalArgumentException( + "cannot add fields with the same name but are not equivalent for fields " + + "[[" + + 
targetCanonicalClassName + + "], [" + + fieldName + + "], [" + + typeToCanonicalTypeName(typeParameter) + + "] and " + + "[[" + + targetCanonicalClassName + + "], [" + + existingPainlessField.javaField.getName() + + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + + "] " + + "with the same name and different type parameters" + ); } } else { MethodHandle methodHandleSetter; @@ -719,7 +1044,8 @@ public final class PainlessLookupBuilder { methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); } catch (IllegalAccessException iae) { throw new IllegalArgumentException( - "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]" + ); } PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey); @@ -729,19 +1055,36 @@ public final class PainlessLookupBuilder { newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); painlessClassBuilder.fields.put(painlessFieldKey.intern(), newPainlessField); } else if (newPainlessField.equals(existingPainlessField) == false) { - throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + - "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + - typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + - "with the same name and different type parameters"); + throw new IllegalArgumentException( + "cannot add fields with the same name but are not equivalent for fields " + + "[[" + + targetCanonicalClassName + + "], [" + + fieldName + + "], [" + + typeToCanonicalTypeName(typeParameter) + + "] and " + + "[[" + + targetCanonicalClassName + + "], [" + + existingPainlessField.javaField.getName() + + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + + "] " + + "with the same name and different type parameters" + ); } } } - public void addImportedPainlessMethod(ClassLoader classLoader, String targetJavaClassName, - String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - Map, Object> annotations) { + public void addImportedPainlessMethod( + ClassLoader classLoader, + String targetJavaClassName, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + Map, Object> annotations + ) { Objects.requireNonNull(classLoader); Objects.requireNonNull(targetJavaClassName); @@ -760,8 +1103,18 @@ public final class PainlessLookupBuilder { String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); if (targetClass == null) { - throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "target class [" + + targetCanonicalClassName + + "] not found for imported method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); @@ -770,8 +1123,18 @@ public final class PainlessLookupBuilder { Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) 
{ - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for imported method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found for imported method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } typeParameters.add(typeParameter); @@ -780,15 +1143,30 @@ public final class PainlessLookupBuilder { Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); if (returnType == null) { - throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for imported method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "return type [" + + returnCanonicalTypeName + + "] not found for imported method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters, annotations); } - public void addImportedPainlessMethod(Class targetClass, String methodName, Class returnType, List> typeParameters, - Map, Object> annotations) { + public void addImportedPainlessMethod( + Class targetClass, + String methodName, + Class returnType, + List> typeParameters, + Map, Object> annotations + ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); Objects.requireNonNull(returnType); @@ -804,13 +1182,18 @@ public final class PainlessLookupBuilder { if (existingTargetClass == null) { javaClassNamesToClasses.put(targetClass.getName().intern(), targetClass); } else if (existingTargetClass != targetClass) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); + throw new IllegalArgumentException( + "class [" + + targetCanonicalClassName + + "] " + + "cannot represent multiple java classes with the same name from different class loaders" + ); } if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( - "invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."); + "invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]." 
+ ); } int typeParametersSize = typeParameters.size(); @@ -818,17 +1201,36 @@ public final class PainlessLookupBuilder { for (Class typeParameter : typeParameters) { if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "not found for imported method [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "not found for imported method [[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } javaTypeParameters.add(typeToJavaType(typeParameter)); } if (isValidType(returnType) == false) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for imported method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(returnType) + + "] not found for imported method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } Method javaMethod; @@ -836,20 +1238,47 @@ public final class PainlessLookupBuilder { try { javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("imported method reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + throw new IllegalArgumentException( + "imported method reflection object [[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] not found", + nsme + ); } if (javaMethod.getReturnType() != typeToJavaType(returnType)) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + - "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + - "for imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(javaMethod.getReturnType()) + + "] " + + "does not match the specified returned type [" + + typeToCanonicalTypeName(returnType) + + "] " + + "for imported method [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } if (Modifier.isStatic(javaMethod.getModifiers()) == false) { - throw new IllegalArgumentException("imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "] must be static"); + throw new IllegalArgumentException( + "imported method [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] must be static" + ); } String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); @@ -867,34 +1296,71 @@ public final class PainlessLookupBuilder { try { methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); } catch 
(IllegalAccessException iae) { - throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + throw new IllegalArgumentException( + "imported method handle [[" + + targetClass.getCanonicalName() + + "], " + + "[" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] not found", + iae + ); } MethodType methodType = methodHandle.type(); PainlessMethod existingImportedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); - PainlessMethod newImportedPainlessMethod = - new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType, annotations); + PainlessMethod newImportedPainlessMethod = new PainlessMethod( + javaMethod, + targetClass, + returnType, + typeParameters, + methodHandle, + methodType, + annotations + ); if (existingImportedPainlessMethod == null) { newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key); painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey.intern(), newImportedPainlessMethod); } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) { - throw new IllegalArgumentException("cannot add imported methods with the same name and arity " + - "but do not have equivalent methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + "]"); + throw new IllegalArgumentException( + "cannot add imported methods with the same name and arity " + + "but do not have equivalent methods " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(returnType) + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] and " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + + "], " + + typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + + "]" + ); } } - public void addPainlessClassBinding(ClassLoader classLoader, String targetJavaClassName, - String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - Map, Object> annotations) { + public void addPainlessClassBinding( + ClassLoader classLoader, + String targetJavaClassName, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters, + Map, Object> annotations + ) { Objects.requireNonNull(classLoader); Objects.requireNonNull(targetJavaClassName); @@ -917,8 +1383,18 @@ public final class PainlessLookupBuilder { Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) { - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for class binding " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found for class binding " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + 
canonicalTypeNameParameters + + "]" + ); } typeParameters.add(typeParameter); @@ -927,15 +1403,30 @@ public final class PainlessLookupBuilder { Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); if (returnType == null) { - throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for class binding " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "return type [" + + returnCanonicalTypeName + + "] not found for class binding " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } addPainlessClassBinding(targetClass, methodName, returnType, typeParameters, annotations); } - public void addPainlessClassBinding(Class targetClass, String methodName, Class returnType, List> typeParameters, - Map, Object> annotations) { + public void addPainlessClassBinding( + Class targetClass, + String methodName, + Class returnType, + List> typeParameters, + Map, Object> annotations + ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); Objects.requireNonNull(returnType); @@ -951,8 +1442,12 @@ public final class PainlessLookupBuilder { if (existingTargetClass == null) { javaClassNamesToClasses.put(targetClass.getName().intern(), targetClass); } else if (existingTargetClass != targetClass) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); + throw new IllegalArgumentException( + "class [" + + targetCanonicalClassName + + "] " + + "cannot represent multiple java classes with the same name from different class loaders" + ); } Constructor[] javaConstructors = targetClass.getConstructors(); @@ -962,7 +1457,8 @@ public final class PainlessLookupBuilder { if (eachJavaConstructor.getDeclaringClass() == targetClass) { if (javaConstructor != null) { throw new IllegalArgumentException( - "class binding [" + targetCanonicalClassName + "] cannot have multiple constructors"); + "class binding [" + targetCanonicalClassName + "] cannot have multiple constructors" + ); } javaConstructor = eachJavaConstructor; @@ -979,27 +1475,54 @@ public final class PainlessLookupBuilder { Class typeParameter = typeParameters.get(typeParameterIndex); if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for class binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for class binding [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } Class javaTypeParameter = javaConstructor.getParameterTypes()[typeParameterIndex]; if (isValidType(javaTypeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for class binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for class binding [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } if (javaTypeParameter != 
typeToJavaType(typeParameter)) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaTypeParameter) + "] " + - "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "for class binding [[" + targetClass.getCanonicalName() + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(javaTypeParameter) + + "] " + + "does not match the specified type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "for class binding [[" + + targetClass.getCanonicalName() + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } } if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( - "invalid method name [" + methodName + "] for class binding [" + targetCanonicalClassName + "]."); + "invalid method name [" + methodName + "] for class binding [" + targetCanonicalClassName + "]." + ); } Method[] javaMethods = targetClass.getMethods(); @@ -1025,34 +1548,81 @@ public final class PainlessLookupBuilder { Class typeParameter = typeParameters.get(constructorTypeParametersSize + typeParameterIndex); if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for class binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for class binding [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } Class javaTypeParameter = javaMethod.getParameterTypes()[typeParameterIndex]; if (isValidType(javaTypeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for class binding [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] not found " + + "for class binding [[" + + targetCanonicalClassName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } if (javaTypeParameter != typeToJavaType(typeParameter)) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaTypeParameter) + "] " + - "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "for class binding [[" + targetClass.getCanonicalName() + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(javaTypeParameter) + + "] " + + "does not match the specified type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "for class binding [[" + + targetClass.getCanonicalName() + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } } if (isValidType(returnType) == false) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for class binding " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(returnType) + + "] not found for class binding " + + "[[" + + targetCanonicalClassName + + 
"], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } if (javaMethod.getReturnType() != typeToJavaType(returnType)) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + - "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + - "for class binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(javaMethod.getReturnType()) + + "] " + + "does not match the specified returned type [" + + typeToCanonicalTypeName(returnType) + + "] " + + "for class binding [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } String painlessMethodKey = buildPainlessMethodKey(methodName, constructorTypeParametersSize + methodTypeParametersSize); @@ -1066,33 +1636,65 @@ public final class PainlessLookupBuilder { } if (Modifier.isStatic(javaMethod.getModifiers())) { - throw new IllegalArgumentException("class binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "] cannot be static"); + throw new IllegalArgumentException( + "class binding [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] cannot be static" + ); } PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); - PainlessClassBinding newPainlessClassBinding = - new PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters, annotations); + PainlessClassBinding newPainlessClassBinding = new PainlessClassBinding( + javaConstructor, + javaMethod, + returnType, + typeParameters, + annotations + ); if (existingPainlessClassBinding == null) { newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key); painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey.intern(), newPainlessClassBinding); } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) { - throw new IllegalArgumentException("cannot add class bindings with the same name and arity " + - "but do not have equivalent methods " + - "[[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + "], " + - typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + "]"); + throw new IllegalArgumentException( + "cannot add class bindings with the same name and arity " + + "but do not have equivalent methods " + + "[[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(returnType) + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] and " + + "[[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + + "], " + + typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + + "]" + ); } } - public void addPainlessInstanceBinding(Object targetInstance, - String methodName, String 
returnCanonicalTypeName, List canonicalTypeNameParameters) { + public void addPainlessInstanceBinding( + Object targetInstance, + String methodName, + String returnCanonicalTypeName, + List canonicalTypeNameParameters + ) { Objects.requireNonNull(targetInstance); Objects.requireNonNull(methodName); @@ -1107,8 +1709,18 @@ public final class PainlessLookupBuilder { Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); if (typeParameter == null) { - throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for instance binding " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "type parameter [" + + canonicalTypeNameParameter + + "] not found for instance binding " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } typeParameters.add(typeParameter); @@ -1117,8 +1729,18 @@ public final class PainlessLookupBuilder { Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); if (returnType == null) { - throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for class binding " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + throw new IllegalArgumentException( + "return type [" + + returnCanonicalTypeName + + "] not found for class binding " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + canonicalTypeNameParameters + + "]" + ); } addPainlessInstanceBinding(targetInstance, methodName, returnType, typeParameters); @@ -1142,13 +1764,18 @@ public final class PainlessLookupBuilder { if (existingTargetClass == null) { javaClassNamesToClasses.put(targetClass.getName().intern(), targetClass); } else if (existingTargetClass != targetClass) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " + - "cannot represent multiple java classes with the same name from different class loaders"); + throw new IllegalArgumentException( + "class [" + + targetCanonicalClassName + + "] " + + "cannot represent multiple java classes with the same name from different class loaders" + ); } if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( - "invalid method name [" + methodName + "] for instance binding [" + targetCanonicalClassName + "]."); + "invalid method name [" + methodName + "] for instance binding [" + targetCanonicalClassName + "]." 
+ ); } int typeParametersSize = typeParameters.size(); @@ -1156,17 +1783,36 @@ public final class PainlessLookupBuilder { for (Class typeParameter : typeParameters) { if (isValidType(typeParameter) == false) { - throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + - "not found for instance binding [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "type parameter [" + + typeToCanonicalTypeName(typeParameter) + + "] " + + "not found for instance binding [[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } javaTypeParameters.add(typeToJavaType(typeParameter)); } if (isValidType(returnType) == false) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for imported method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(returnType) + + "] not found for imported method " + + "[[" + + targetCanonicalClassName + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } Method javaMethod; @@ -1174,20 +1820,47 @@ public final class PainlessLookupBuilder { try { javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("instance binding reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + throw new IllegalArgumentException( + "instance binding reflection object [[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] not found", + nsme + ); } if (javaMethod.getReturnType() != typeToJavaType(returnType)) { - throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + - "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + - "for instance binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]"); + throw new IllegalArgumentException( + "return type [" + + typeToCanonicalTypeName(javaMethod.getReturnType()) + + "] " + + "does not match the specified returned type [" + + typeToCanonicalTypeName(returnType) + + "] " + + "for instance binding [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "]" + ); } if (Modifier.isStatic(javaMethod.getModifiers())) { - throw new IllegalArgumentException("instance binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "] cannot be static"); + throw new IllegalArgumentException( + "instance binding [[" + + targetClass.getCanonicalName() + + "], [" + + methodName + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] cannot be static" + ); } String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); @@ -1201,23 +1874,43 @@ public final class PainlessLookupBuilder { } PainlessInstanceBinding existingPainlessInstanceBinding = 
painlessMethodKeysToPainlessInstanceBindings.get(painlessMethodKey); - PainlessInstanceBinding newPainlessInstanceBinding = - new PainlessInstanceBinding(targetInstance, javaMethod, returnType, typeParameters); + PainlessInstanceBinding newPainlessInstanceBinding = new PainlessInstanceBinding( + targetInstance, + javaMethod, + returnType, + typeParameters + ); if (existingPainlessInstanceBinding == null) { newPainlessInstanceBinding = painlessInstanceBindingCache.computeIfAbsent(newPainlessInstanceBinding, key -> key); painlessMethodKeysToPainlessInstanceBindings.put(painlessMethodKey.intern(), newPainlessInstanceBinding); } else if (newPainlessInstanceBinding.equals(existingPainlessInstanceBinding) == false) { - throw new IllegalArgumentException("cannot add instances bindings with the same name and arity " + - "but do not have equivalent methods " + - "[[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(existingPainlessInstanceBinding.returnType) + "], " + - typesToCanonicalTypeNames(existingPainlessInstanceBinding.typeParameters) + "]"); + throw new IllegalArgumentException( + "cannot add instances bindings with the same name and arity " + + "but do not have equivalent methods " + + "[[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(returnType) + + "], " + + typesToCanonicalTypeNames(typeParameters) + + "] and " + + "[[" + + targetCanonicalClassName + + "], " + + "[" + + methodName + + "], " + + "[" + + typeToCanonicalTypeName(existingPainlessInstanceBinding.returnType) + + "], " + + typesToCanonicalTypeNames(existingPainlessInstanceBinding.typeParameters) + + "]" + ); } } @@ -1234,28 +1927,32 @@ public final class PainlessLookupBuilder { } if (javaClassNamesToClasses.values().containsAll(canonicalClassNamesToClasses.values()) == false) { - throw new IllegalArgumentException("the values of java class names to classes " + - "must be a superset of the values of canonical class names to classes"); + throw new IllegalArgumentException( + "the values of java class names to classes " + "must be a superset of the values of canonical class names to classes" + ); } if (javaClassNamesToClasses.values().containsAll(classesToPainlessClasses.keySet()) == false) { - throw new IllegalArgumentException("the values of java class names to classes " + - "must be a superset of the keys of classes to painless classes"); + throw new IllegalArgumentException( + "the values of java class names to classes " + "must be a superset of the keys of classes to painless classes" + ); } - if (canonicalClassNamesToClasses.values().containsAll(classesToPainlessClasses.keySet()) == false || - classesToPainlessClasses.keySet().containsAll(canonicalClassNamesToClasses.values()) == false) { - throw new IllegalArgumentException("the values of canonical class names to classes " + - "must have the same classes as the keys of classes to painless classes"); + if (canonicalClassNamesToClasses.values().containsAll(classesToPainlessClasses.keySet()) == false + || classesToPainlessClasses.keySet().containsAll(canonicalClassNamesToClasses.values()) == false) { + throw new IllegalArgumentException( + "the values of canonical class names to classes " + "must have the same classes as the keys of classes to painless classes" + ); } return new 
PainlessLookup( - javaClassNamesToClasses, - canonicalClassNamesToClasses, - classesToPainlessClasses, - painlessMethodKeysToImportedPainlessMethods, - painlessMethodKeysToPainlessClassBindings, - painlessMethodKeysToPainlessInstanceBindings); + javaClassNamesToClasses, + canonicalClassNamesToClasses, + classesToPainlessClasses, + painlessMethodKeysToImportedPainlessMethods, + painlessMethodKeysToPainlessClassBindings, + painlessMethodKeysToPainlessInstanceBindings + ); } private void copyPainlessClassMembers() { @@ -1303,8 +2000,9 @@ public final class PainlessLookupBuilder { PainlessMethod newPainlessMethod = painlessMethodEntry.getValue(); PainlessMethod existingPainlessMethod = targetPainlessClassBuilder.methods.get(painlessMethodKey); - if (existingPainlessMethod == null || existingPainlessMethod.targetClass != newPainlessMethod.targetClass && - existingPainlessMethod.targetClass.isAssignableFrom(newPainlessMethod.targetClass)) { + if (existingPainlessMethod == null + || existingPainlessMethod.targetClass != newPainlessMethod.targetClass + && existingPainlessMethod.targetClass.isAssignableFrom(newPainlessMethod.targetClass)) { targetPainlessClassBuilder.methods.put(painlessMethodKey.intern(), newPainlessMethod); } } @@ -1314,9 +2012,10 @@ public final class PainlessLookupBuilder { PainlessField newPainlessField = painlessFieldEntry.getValue(); PainlessField existingPainlessField = targetPainlessClassBuilder.fields.get(painlessFieldKey); - if (existingPainlessField == null || - existingPainlessField.javaField.getDeclaringClass() != newPainlessField.javaField.getDeclaringClass() && - existingPainlessField.javaField.getDeclaringClass().isAssignableFrom(newPainlessField.javaField.getDeclaringClass())) { + if (existingPainlessField == null + || existingPainlessField.javaField.getDeclaringClass() != newPainlessField.javaField.getDeclaringClass() + && existingPainlessField.javaField.getDeclaringClass() + .isAssignableFrom(newPainlessField.javaField.getDeclaringClass())) { targetPainlessClassBuilder.fields.put(painlessFieldKey.intern(), newPainlessField); } } @@ -1343,8 +2042,13 @@ public final class PainlessLookupBuilder { } if (javaMethods.size() != 1 && targetClass.isAnnotationPresent(FunctionalInterface.class)) { - throw new IllegalArgumentException("class [" + typeToCanonicalTypeName(targetClass) + "] " + - "is illegally marked as a FunctionalInterface with java methods " + javaMethods); + throw new IllegalArgumentException( + "class [" + + typeToCanonicalTypeName(targetClass) + + "] " + + "is illegally marked as a FunctionalInterface with java methods " + + javaMethods + ); } else if (javaMethods.size() == 1) { java.lang.reflect.Method javaMethod = javaMethods.get(0); String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount()); @@ -1370,15 +2074,13 @@ public final class PainlessLookupBuilder { for (PainlessMethod painlessMethod : painlessClassBuilder.runtimeMethods.values()) { for (Class typeParameter : painlessMethod.typeParameters) { - if ( - typeParameter == Byte.class || - typeParameter == Short.class || - typeParameter == Character.class || - typeParameter == Integer.class || - typeParameter == Long.class || - typeParameter == Float.class || - typeParameter == Double.class - ) { + if (typeParameter == Byte.class + || typeParameter == Short.class + || typeParameter == Character.class + || typeParameter == Integer.class + || typeParameter == Long.class + || typeParameter == Float.class + || typeParameter == Double.class) { 
generateBridgeMethod(painlessClassBuilder, painlessMethod); } } @@ -1396,17 +2098,35 @@ public final class PainlessLookupBuilder { int bridgeClassFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; int bridgeClassAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL; - String bridgeClassName = - "org/opensearch/painless/Bridge$" + javaMethod.getDeclaringClass().getSimpleName() + "$" + javaMethod.getName(); + String bridgeClassName = "org/opensearch/painless/Bridge$" + + javaMethod.getDeclaringClass().getSimpleName() + + "$" + + javaMethod.getName(); ClassWriter bridgeClassWriter = new ClassWriter(bridgeClassFrames); bridgeClassWriter.visit( - WriterConstants.CLASS_VERSION, bridgeClassAccess, bridgeClassName, null, OBJECT_TYPE.getInternalName(), null); + WriterConstants.CLASS_VERSION, + bridgeClassAccess, + bridgeClassName, + null, + OBJECT_TYPE.getInternalName(), + null + ); - org.objectweb.asm.commons.Method bridgeConstructorType = - new org.objectweb.asm.commons.Method("", MethodType.methodType(void.class).toMethodDescriptorString()); - GeneratorAdapter bridgeConstructorWriter = - new GeneratorAdapter(Opcodes.ASM5, bridgeConstructorType, bridgeClassWriter.visitMethod( - Opcodes.ACC_PRIVATE, bridgeConstructorType.getName(), bridgeConstructorType.getDescriptor(), null, null)); + org.objectweb.asm.commons.Method bridgeConstructorType = new org.objectweb.asm.commons.Method( + "", + MethodType.methodType(void.class).toMethodDescriptorString() + ); + GeneratorAdapter bridgeConstructorWriter = new GeneratorAdapter( + Opcodes.ASM5, + bridgeConstructorType, + bridgeClassWriter.visitMethod( + Opcodes.ACC_PRIVATE, + bridgeConstructorType.getName(), + bridgeConstructorType.getDescriptor(), + null, + null + ) + ); bridgeConstructorWriter.visitCode(); bridgeConstructorWriter.loadThis(); bridgeConstructorWriter.invokeConstructor(OBJECT_TYPE, bridgeConstructorType); @@ -1421,15 +2141,13 @@ public final class PainlessLookupBuilder { } for (Class typeParameter : javaMethod.getParameterTypes()) { - if ( - typeParameter == Byte.class || - typeParameter == Short.class || - typeParameter == Character.class || - typeParameter == Integer.class || - typeParameter == Long.class || - typeParameter == Float.class || - typeParameter == Double.class - ) { + if (typeParameter == Byte.class + || typeParameter == Short.class + || typeParameter == Character.class + || typeParameter == Integer.class + || typeParameter == Long.class + || typeParameter == Float.class + || typeParameter == Double.class) { bridgeTypeParameters.add(Object.class); } else { bridgeTypeParameters.add(typeParameter); @@ -1437,11 +2155,13 @@ public final class PainlessLookupBuilder { } MethodType bridgeMethodType = MethodType.methodType(painlessMethod.returnType, bridgeTypeParameters); - MethodWriter bridgeMethodWriter = - new MethodWriter(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, - new org.objectweb.asm.commons.Method( - painlessMethod.javaMethod.getName(), bridgeMethodType.toMethodDescriptorString()), - bridgeClassWriter, null, null); + MethodWriter bridgeMethodWriter = new MethodWriter( + Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, + new org.objectweb.asm.commons.Method(painlessMethod.javaMethod.getName(), bridgeMethodType.toMethodDescriptorString()), + bridgeClassWriter, + null, + null + ); bridgeMethodWriter.visitCode(); if (isStatic == false) { @@ -1452,13 +2172,13 @@ public final class PainlessLookupBuilder { bridgeMethodWriter.loadArg(typeParameterCount + bridgeTypeParameterOffset); Class typeParameter = 
javaMethod.getParameterTypes()[typeParameterCount]; - if (typeParameter == Byte.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT); - else if (typeParameter == Short.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT); + if (typeParameter == Byte.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT); + else if (typeParameter == Short.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT); else if (typeParameter == Character.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_CHARACTER_IMPLICIT); - else if (typeParameter == Integer.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_IMPLICIT); - else if (typeParameter == Long.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_IMPLICIT); - else if (typeParameter == Float.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_IMPLICIT); - else if (typeParameter == Double.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_IMPLICIT); + else if (typeParameter == Integer.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_IMPLICIT); + else if (typeParameter == Long.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_IMPLICIT); + else if (typeParameter == Float.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_IMPLICIT); + else if (typeParameter == Double.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_IMPLICIT); } bridgeMethodWriter.invokeMethodCall(painlessMethod); @@ -1477,15 +2197,26 @@ public final class PainlessLookupBuilder { Class bridgeClass = bridgeLoader.defineBridge(bridgeClassName.replace('/', '.'), bridgeClassWriter.toByteArray()); Method bridgeMethod = bridgeClass.getMethod( - painlessMethod.javaMethod.getName(), bridgeTypeParameters.toArray(new Class[0])); + painlessMethod.javaMethod.getName(), + bridgeTypeParameters.toArray(new Class[0]) + ); MethodHandle bridgeHandle = MethodHandles.publicLookup().in(bridgeClass).unreflect(bridgeClass.getMethods()[0]); - bridgePainlessMethod = new PainlessMethod(bridgeMethod, bridgeClass, - painlessMethod.returnType, bridgeTypeParameters, bridgeHandle, bridgeMethodType, Collections.emptyMap()); + bridgePainlessMethod = new PainlessMethod( + bridgeMethod, + bridgeClass, + painlessMethod.returnType, + bridgeTypeParameters, + bridgeHandle, + bridgeMethodType, + Collections.emptyMap() + ); painlessClassBuilder.runtimeMethods.put(painlessMethodKey.intern(), bridgePainlessMethod); painlessBridgeCache.put(painlessMethod, bridgePainlessMethod); } catch (Exception exception) { throw new IllegalStateException( - "internal error occurred attempting to generate a bridge method [" + bridgeClassName + "]", exception); + "internal error occurred attempting to generate a bridge method [" + bridgeClassName + "]", + exception + ); } } else { painlessClassBuilder.runtimeMethods.put(painlessMethodKey.intern(), bridgePainlessMethod); @@ -1506,19 +2237,31 @@ public final class PainlessLookupBuilder { String methodName = painlessMethod.javaMethod.getName(); int typeParametersSize = painlessMethod.typeParameters.size(); - if (typeParametersSize == 0 && methodName.startsWith("get") && methodName.length() > 3 && - Character.isUpperCase(methodName.charAt(3))) { + if (typeParametersSize == 0 + && methodName.startsWith("get") + && methodName.length() > 3 + && Character.isUpperCase(methodName.charAt(3))) { painlessClassBuilder.getterMethodHandles.putIfAbsent( - 
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), bridgePainlessMethod.methodHandle); - } else if (typeParametersSize == 0 && methodName.startsWith("is") && methodName.length() > 2 && - Character.isUpperCase(methodName.charAt(2))) { - painlessClassBuilder.getterMethodHandles.putIfAbsent( - Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), bridgePainlessMethod.methodHandle); - } else if (typeParametersSize == 1 && methodName.startsWith("set") && methodName.length() > 3 && - Character.isUpperCase(methodName.charAt(3))) { - painlessClassBuilder.setterMethodHandles.putIfAbsent( - Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), bridgePainlessMethod.methodHandle); - } + Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), + bridgePainlessMethod.methodHandle + ); + } else if (typeParametersSize == 0 + && methodName.startsWith("is") + && methodName.length() > 2 + && Character.isUpperCase(methodName.charAt(2))) { + painlessClassBuilder.getterMethodHandles.putIfAbsent( + Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), + bridgePainlessMethod.methodHandle + ); + } else if (typeParametersSize == 1 + && methodName.startsWith("set") + && methodName.length() > 3 + && Character.isUpperCase(methodName.charAt(3))) { + painlessClassBuilder.setterMethodHandles.putIfAbsent( + Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), + bridgePainlessMethod.methodHandle + ); + } } for (PainlessField painlessField : painlessClassBuilder.fields.values()) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java index f259984580a..cae425ad1fe 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupUtility.java @@ -125,9 +125,9 @@ public final class PainlessLookupUtility { int typeNameLength = canonicalTypeName.length(); while (arrayIndex < typeNameLength) { - if (canonicalTypeName.charAt(arrayIndex) == '[' && - ++arrayIndex < typeNameLength && - canonicalTypeName.charAt(arrayIndex++) == ']') { + if (canonicalTypeName.charAt(arrayIndex) == '[' + && ++arrayIndex < typeNameLength + && canonicalTypeName.charAt(arrayIndex++) == ']') { ++arrayDimensions; } else { return null; @@ -343,15 +343,15 @@ public final class PainlessLookupUtility { * where {@code true} is returned if the type is a constant type and {@code false} otherwise. 
*/ public static boolean isConstantType(Class type) { - return type == boolean.class || - type == byte.class || - type == short.class || - type == char.class || - type == int.class || - type == long.class || - type == float.class || - type == double.class || - type == String.class; + return type == boolean.class + || type == byte.class + || type == short.class + || type == char.class + || type == int.class + || type == long.class + || type == float.class + || type == double.class + || type == String.class; } /** @@ -384,7 +384,7 @@ public final class PainlessLookupUtility { return new Object[0]; } - List names = ((InjectConstantAnnotation)painlessMethod.annotations.get(InjectConstantAnnotation.class)).injects; + List names = ((InjectConstantAnnotation) painlessMethod.annotations.get(InjectConstantAnnotation.class)).injects; Object[] injections = new Object[names.size()]; for (int i = 0; i < names.size(); i++) { @@ -392,8 +392,14 @@ public final class PainlessLookupUtility { Object constant = constants.get(name); if (constant == null) { - throw new IllegalStateException("constant [" + name + "] not found for injection into method " + - "[" + buildPainlessMethodKey(painlessMethod.javaMethod.getName(), painlessMethod.typeParameters.size()) + "]"); + throw new IllegalStateException( + "constant [" + + name + + "] not found for injection into method " + + "[" + + buildPainlessMethodKey(painlessMethod.javaMethod.getName(), painlessMethod.typeParameters.size()) + + "]" + ); } injections[i] = constant; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessMethod.java index 1782ff195ad..9a445710a21 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessMethod.java @@ -51,8 +51,15 @@ public class PainlessMethod { public final MethodType methodType; public final Map, Object> annotations; - public PainlessMethod(Method javaMethod, Class targetClass, Class returnType, List> typeParameters, - MethodHandle methodHandle, MethodType methodType, Map, Object> annotations) { + public PainlessMethod( + Method javaMethod, + Class targetClass, + Class returnType, + List> typeParameters, + MethodHandle methodHandle, + MethodType methodType, + Map, Object> annotations + ) { this.javaMethod = javaMethod; this.targetClass = targetClass; @@ -73,14 +80,14 @@ public class PainlessMethod { return false; } - PainlessMethod that = (PainlessMethod)object; + PainlessMethod that = (PainlessMethod) object; - return Objects.equals(javaMethod, that.javaMethod) && - Objects.equals(targetClass, that.targetClass) && - Objects.equals(returnType, that.returnType) && - Objects.equals(typeParameters, that.typeParameters) && - Objects.equals(methodType, that.methodType) && - Objects.equals(annotations, that.annotations); + return Objects.equals(javaMethod, that.javaMethod) + && Objects.equals(targetClass, that.targetClass) + && Objects.equals(returnType, that.returnType) + && Objects.equals(typeParameters, that.typeParameters) + && Objects.equals(methodType, that.methodType) + && Objects.equals(annotations, that.annotations); } @Override diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/EAssignment.java index fbcbfd8c689..88738c70562 100644 --- 
a/modules/lang-painless/src/main/java/org/opensearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/EAssignment.java @@ -48,8 +48,14 @@ public class EAssignment extends AExpression { private final boolean postIfRead; private final Operation operation; - public EAssignment(int identifier, Location location, - AExpression leftNode, AExpression rightNode, boolean postIfRead, Operation operation) { + public EAssignment( + int identifier, + Location location, + AExpression leftNode, + AExpression rightNode, + boolean postIfRead, + Operation operation + ) { super(identifier, location); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/ECall.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/ECall.java index 7442c57e4db..b601488f351 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/node/ECall.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/ECall.java @@ -49,8 +49,14 @@ public class ECall extends AExpression { private final List argumentNodes; private final boolean isNullSafe; - public ECall(int identifier, Location location, - AExpression prefixNode, String methodName, List argumentNodes, boolean isNullSafe) { + public ECall( + int identifier, + Location location, + AExpression prefixNode, + String methodName, + List argumentNodes, + boolean isNullSafe + ) { super(identifier, location); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/ELambda.java index 09c6c07fafd..9b36a802971 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/ELambda.java @@ -68,8 +68,13 @@ public class ELambda extends AExpression { private final List parameterNames; private final SBlock blockNode; - public ELambda(int identifier, Location location, - List canonicalTypeNameParameters, List parameterNames, SBlock blockNode) { + public ELambda( + int identifier, + Location location, + List canonicalTypeNameParameters, + List parameterNames, + SBlock blockNode + ) { super(identifier, location); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SBlock.java index f92f81afaf8..85c7554506f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SBlock.java @@ -63,7 +63,7 @@ public class SBlock extends AStatement { @Override public void visitChildren(UserTreeVisitor userTreeVisitor, Scope scope) { - for (AStatement statementNode: statementNodes) { + for (AStatement statementNode : statementNodes) { statementNode.visit(userTreeVisitor, scope); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFor.java index 2d0a07f5eee..c841e637bef 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFor.java @@ -45,8 +45,14 @@ public class SFor extends AStatement { private final AExpression afterthoughtNode; private final SBlock blockNode; - public SFor(int identifier, Location location, - ANode initializerNode, AExpression conditionNode, AExpression 
afterthoughtNode, SBlock blockNode) { + public SFor( + int identifier, + Location location, + ANode initializerNode, + AExpression conditionNode, + AExpression afterthoughtNode, + SBlock blockNode + ) { super(identifier, location); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFunction.java index 46a82b2bc03..21e889188b4 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/node/SFunction.java @@ -54,10 +54,19 @@ public class SFunction extends ANode { private final boolean isSynthetic; private final boolean isAutoReturnEnabled; - public SFunction(int identifier, Location location, - String returnCanonicalTypeName, String name, List canonicalTypeNameParameters, List parameterNames, - SBlock blockNode, - boolean isInternal, boolean isStatic, boolean isSynthetic, boolean isAutoReturnEnabled) { + public SFunction( + int identifier, + Location location, + String returnCanonicalTypeName, + String name, + List canonicalTypeNameParameters, + List parameterNames, + SBlock blockNode, + boolean isInternal, + boolean isStatic, + boolean isSynthetic, + boolean isAutoReturnEnabled + ) { super(identifier, location); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultConstantFoldingOptimizationPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultConstantFoldingOptimizationPhase.java index fc8bd298cc6..8b55e70e8ea 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultConstantFoldingOptimizationPhase.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultConstantFoldingOptimizationPhase.java @@ -191,48 +191,81 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor type = irUnaryMathNode.getExpressionType(); if (operation == Operation.SUB) { if (type == int.class) { - irConstantNode.setConstant(-(int)irConstantNode.getConstant()); + irConstantNode.setConstant(-(int) irConstantNode.getConstant()); } else if (type == long.class) { - irConstantNode.setConstant(-(long)irConstantNode.getConstant()); + irConstantNode.setConstant(-(long) irConstantNode.getConstant()); } else if (type == float.class) { - irConstantNode.setConstant(-(float)irConstantNode.getConstant()); + irConstantNode.setConstant(-(float) irConstantNode.getConstant()); } else if (type == double.class) { - irConstantNode.setConstant(-(double)irConstantNode.getConstant()); + irConstantNode.setConstant(-(double) irConstantNode.getConstant()); } else { - throw irUnaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "unary operation [" + operation.symbol + "] on " + - "constant [" + irConstantNode.getConstant() + "]")); + throw irUnaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "unary operation [" + + operation.symbol + + "] on " + + "constant [" + + irConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irConstantNode); } else if (operation == Operation.BWNOT) { if (type == int.class) { - irConstantNode.setConstant(~(int)irConstantNode.getConstant()); + irConstantNode.setConstant(~(int) 
irConstantNode.getConstant()); } else if (type == long.class) { - irConstantNode.setConstant(~(long)irConstantNode.getConstant()); + irConstantNode.setConstant(~(long) irConstantNode.getConstant()); } else { - throw irUnaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "unary operation [" + operation.symbol + "] on " + - "constant [" + irConstantNode.getConstant() + "]")); + throw irUnaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "unary operation [" + + operation.symbol + + "] on " + + "constant [" + + irConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irConstantNode); } else if (operation == Operation.NOT) { if (type == boolean.class) { - irConstantNode.setConstant(!(boolean)irConstantNode.getConstant()); + irConstantNode.setConstant(!(boolean) irConstantNode.getConstant()); } else { - throw irUnaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "unary operation [" + operation.symbol + "] on " + - "constant [" + irConstantNode.getConstant() + "]")); + throw irUnaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "unary operation [" + + operation.symbol + + "] on " + + "constant [" + + irConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irConstantNode); @@ -248,44 +281,73 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor type = irBinaryMathNode.getExpressionType(); if (operation == Operation.MUL) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() * (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() * (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() * (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() * (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() * (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((float) irLeftConstantNode.getConstant() * (float) irRightConstantNode.getConstant()); } else if (type == double.class) { - irLeftConstantNode.setConstant((double)irLeftConstantNode.getConstant() * (double)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((double) irLeftConstantNode.getConstant() * (double) irRightConstantNode.getConstant()); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + 
PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if (operation == Operation.DIV) { try { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() / (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() / (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() / (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() / (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() / (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (float) irLeftConstantNode.getConstant() / (float) irRightConstantNode.getConstant() + ); } else if (type == double.class) { irLeftConstantNode.setConstant( - (double)irLeftConstantNode.getConstant() / (double)irRightConstantNode.getConstant()); + (double) irLeftConstantNode.getConstant() / (double) irRightConstantNode.getConstant() + ); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } } catch (ArithmeticException ae) { throw irBinaryMathNode.getLocation().createError(ae); @@ -295,19 +357,35 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor> (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() >> (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() >> (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() >> (int) irRightConstantNode.getConstant()); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if 
(operation == Operation.USH) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() >>> (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() >>> (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() >>> (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() >>> (int) irRightConstantNode.getConstant()); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if (operation == Operation.BWAND) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() & (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() & (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() & (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() & (long) irRightConstantNode.getConstant()); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if (operation == Operation.XOR) { if (type == boolean.class) { - irLeftConstantNode.setConstant((boolean)irLeftConstantNode.getConstant() ^ (boolean)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (boolean) irLeftConstantNode.getConstant() ^ (boolean) irRightConstantNode.getConstant() + ); } else if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() ^ (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() ^ (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() ^ (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() ^ (long) irRightConstantNode.getConstant()); 
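For readers following the constant-folding hunks above: DefaultConstantFoldingOptimizationPhase collapses a binary math node whose two operands are already constants into a single constant, switching on the promoted expression type and rejecting anything unexpected. The sketch below is illustrative only; FoldableConst and FoldSketch are invented stand-ins for the real IR classes, shown under the assumption that the folded value lives in an Object-typed slot like ConstantNode#getConstant().

// Illustrative stand-ins only (FoldableConst, FoldSketch are not Painless classes).
final class FoldableConst {
    Object constant; // the folded value, analogous to ConstantNode#getConstant()
}

final class FoldSketch {
    // Mirrors the Operation.MUL branch in the hunk above: fold left * right
    // into the left node, switching on the promoted expression type.
    static FoldableConst foldMul(FoldableConst left, FoldableConst right, Class<?> type) {
        if (type == int.class) {
            left.constant = (int) left.constant * (int) right.constant;
        } else if (type == long.class) {
            left.constant = (long) left.constant * (long) right.constant;
        } else if (type == float.class) {
            left.constant = (float) left.constant * (float) right.constant;
        } else if (type == double.class) {
            left.constant = (double) left.constant * (double) right.constant;
        } else {
            throw new IllegalStateException("constant folding error: unexpected type [" + type + "]");
        }
        return left; // the folded constant replaces the whole binary expression
    }
}

In the real phase the folded node is then handed back via scope.accept(irLeftConstantNode), so it replaces the original binary expression in its parent, as the surrounding hunks show.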
} else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if (operation == Operation.BWOR) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() | (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() | (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() | (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() | (long) irRightConstantNode.getConstant()); } else { - throw irBinaryMathNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBinaryMathNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); @@ -446,18 +630,18 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor irStringConcatenationNode.getArgumentNodes().set(j + 1, e)); if (irLeftNode instanceof ConstantNode && irRightNode instanceof ConstantNode) { - ConstantNode irConstantNode = (ConstantNode)irLeftNode; - irConstantNode.setConstant("" + irConstantNode.getConstant() + ((ConstantNode)irRightNode).getConstant()); + ConstantNode irConstantNode = (ConstantNode) irLeftNode; + irConstantNode.setConstant("" + irConstantNode.getConstant() + ((ConstantNode) irRightNode).getConstant()); irConstantNode.setExpressionType(String.class); irStringConcatenationNode.getArgumentNodes().remove(i + 1); } else if (irLeftNode instanceof NullNode && irRightNode instanceof ConstantNode) { - ConstantNode irConstantNode = (ConstantNode)irRightNode; - irConstantNode.setConstant("" + null + ((ConstantNode)irRightNode).getConstant()); + ConstantNode irConstantNode = (ConstantNode) irRightNode; + irConstantNode.setConstant("" + null + ((ConstantNode) irRightNode).getConstant()); irConstantNode.setExpressionType(String.class); irStringConcatenationNode.getArgumentNodes().remove(i); } else if (irLeftNode instanceof ConstantNode && irRightNode instanceof NullNode) { - ConstantNode irConstantNode = (ConstantNode)irLeftNode; - irConstantNode.setConstant("" + ((ConstantNode)irLeftNode).getConstant() + null); + ConstantNode 
irConstantNode = (ConstantNode) irLeftNode; + irConstantNode.setConstant("" + ((ConstantNode) irLeftNode).getConstant() + null); irConstantNode.setExpressionType(String.class); irStringConcatenationNode.getArgumentNodes().remove(i + 1); } else if (irLeftNode instanceof NullNode && irRightNode instanceof NullNode) { @@ -486,30 +670,60 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor type = irBooleanNode.getExpressionType(); if (operation == Operation.AND) { if (type == boolean.class) { - irLeftConstantNode.setConstant((boolean)irLeftConstantNode.getConstant() && (boolean)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (boolean) irLeftConstantNode.getConstant() && (boolean) irRightConstantNode.getConstant() + ); } else { - throw irBooleanNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "binary operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBooleanNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "binary operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); } else if (operation == Operation.OR) { if (type == boolean.class) { - irLeftConstantNode.setConstant((boolean)irLeftConstantNode.getConstant() || (boolean)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (boolean) irLeftConstantNode.getConstant() || (boolean) irRightConstantNode.getConstant() + ); } else { - throw irBooleanNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "boolean operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irBooleanNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "boolean operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } scope.accept(irLeftConstantNode); @@ -523,26 +737,30 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor type = irComparisonNode.getComparisonType(); if (operation == Operation.EQ || operation == Operation.EQR) { if (type == boolean.class) { - irLeftConstantNode.setConstant((boolean)irLeftConstantNode.getConstant() == (boolean)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (boolean) irLeftConstantNode.getConstant() == (boolean) irRightConstantNode.getConstant() + ); } else if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() == (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() == (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() == 
(long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() == (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() == (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((float) irLeftConstantNode.getConstant() == (float) irRightConstantNode.getConstant()); } else if (type == double.class) { - irLeftConstantNode.setConstant((double)irLeftConstantNode.getConstant() == (double)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((double) irLeftConstantNode.getConstant() == (double) irRightConstantNode.getConstant()); } else if (irLeftConstantNode == null && irRightConstantNode == null) { irLeftConstantNode = new ConstantNode(irComparisonNode.getLeftNode().getLocation()); irLeftConstantNode.setConstant(true); @@ -561,15 +779,17 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() > (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() > (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() > (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() > (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (float) irLeftConstantNode.getConstant() > (float) irRightConstantNode.getConstant() + ); } else if (type == double.class) { irLeftConstantNode.setConstant( - (double)irLeftConstantNode.getConstant() > (double)irRightConstantNode.getConstant()); + (double) irLeftConstantNode.getConstant() > (double) irRightConstantNode.getConstant() + ); } else { - throw irComparisonNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "comparison operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irComparisonNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "comparison operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } irLeftConstantNode.setExpressionType(boolean.class); scope.accept(irLeftConstantNode); } else if (operation == Operation.GTE) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() >= (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() >= (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() >= (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() >= (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() >= (float)irRightConstantNode.getConstant()); + 
irLeftConstantNode.setConstant( + (float) irLeftConstantNode.getConstant() >= (float) irRightConstantNode.getConstant() + ); } else if (type == double.class) { irLeftConstantNode.setConstant( - (double)irLeftConstantNode.getConstant() >= (double)irRightConstantNode.getConstant()); + (double) irLeftConstantNode.getConstant() >= (double) irRightConstantNode.getConstant() + ); } else { - throw irComparisonNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "comparison operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irComparisonNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "comparison operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } irLeftConstantNode.setExpressionType(boolean.class); scope.accept(irLeftConstantNode); } else if (operation == Operation.LT) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() < (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() < (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() < (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() < (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() < (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (float) irLeftConstantNode.getConstant() < (float) irRightConstantNode.getConstant() + ); } else if (type == double.class) { irLeftConstantNode.setConstant( - (double)irLeftConstantNode.getConstant() < (double)irRightConstantNode.getConstant()); + (double) irLeftConstantNode.getConstant() < (double) irRightConstantNode.getConstant() + ); } else { - throw irComparisonNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "comparison operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irComparisonNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "comparison operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } irLeftConstantNode.setExpressionType(boolean.class); scope.accept(irLeftConstantNode); } else if (operation == Operation.LTE) { if (type == int.class) { - irLeftConstantNode.setConstant((int)irLeftConstantNode.getConstant() <= (int)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((int) irLeftConstantNode.getConstant() <= (int) irRightConstantNode.getConstant()); } else if (type == long.class) { - 
irLeftConstantNode.setConstant((long)irLeftConstantNode.getConstant() <= (long)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant((long) irLeftConstantNode.getConstant() <= (long) irRightConstantNode.getConstant()); } else if (type == float.class) { - irLeftConstantNode.setConstant((float)irLeftConstantNode.getConstant() <= (float)irRightConstantNode.getConstant()); + irLeftConstantNode.setConstant( + (float) irLeftConstantNode.getConstant() <= (float) irRightConstantNode.getConstant() + ); } else if (type == double.class) { irLeftConstantNode.setConstant( - (double)irLeftConstantNode.getConstant() <= (double)irRightConstantNode.getConstant()); + (double) irLeftConstantNode.getConstant() <= (double) irRightConstantNode.getConstant() + ); } else { - throw irComparisonNode.getLocation().createError(new IllegalStateException("constant folding error: " + - "unexpected type [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] for " + - "comparison operation [" + operation.symbol + "] on " + - "constants [" + irLeftConstantNode.getConstant() + "] and [" + irRightConstantNode.getConstant() + "]")); + throw irComparisonNode.getLocation() + .createError( + new IllegalStateException( + "constant folding error: " + + "unexpected type [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] for " + + "comparison operation [" + + operation.symbol + + "] on " + + "constants [" + + irLeftConstantNode.getConstant() + + "] and [" + + irRightConstantNode.getConstant() + + "]" + ) + ); } irLeftConstantNode.setExpressionType(boolean.class); @@ -673,9 +957,10 @@ public class DefaultConstantFoldingOptimizationPhase extends IRTreeBaseVisitor returnType = localFunction.getReturnType(); List> typeParameters = localFunction.getTypeParameters(); FunctionScope functionScope = newFunctionScope(scriptScope, localFunction.getReturnType()); @@ -255,9 +265,17 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor type = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); if (type == null) { - throw userEachNode.createError(new IllegalArgumentException( - "invalid foreach loop: type [" + canonicalTypeName + "] not found")); + throw userEachNode.createError( + new IllegalArgumentException("invalid foreach loop: type [" + canonicalTypeName + "] not found") + ); } semanticScope = semanticScope.newLocalScope(); @@ -628,28 +654,37 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor iterableValueType = semanticScope.getDecoration(userIterableNode, ValueType.class).getValueType(); if (iterableValueType.isArray()) { - PainlessCast painlessCast = - AnalyzerCaster.getLegalCast(location, iterableValueType.getComponentType(), variable.getType(), true, true); + PainlessCast painlessCast = AnalyzerCaster.getLegalCast( + location, + iterableValueType.getComponentType(), + variable.getType(), + true, + true + ); if (painlessCast != null) { semanticScope.putDecoration(userEachNode, new ExpressionPainlessCast(painlessCast)); } } else if (iterableValueType == def.class || Iterable.class.isAssignableFrom(iterableValueType)) { if (iterableValueType != def.class) { - PainlessMethod method = semanticScope.getScriptScope().getPainlessLookup(). 
- lookupPainlessMethod(iterableValueType, false, "iterator", 0); + PainlessMethod method = semanticScope.getScriptScope() + .getPainlessLookup() + .lookupPainlessMethod(iterableValueType, false, "iterator", 0); if (method == null) { - throw userEachNode.createError(new IllegalArgumentException("invalid foreach loop: " + - "method [" + typeToCanonicalTypeName(iterableValueType) + ", iterator/0] not found")); + throw userEachNode.createError( + new IllegalArgumentException( + "invalid foreach loop: " + "method [" + typeToCanonicalTypeName(iterableValueType) + ", iterator/0] not found" + ) + ); } semanticScope.putDecoration(userEachNode, new IterablePainlessMethod(method)); @@ -661,8 +696,14 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor type = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); if (type == null) { - throw userDeclarationNode.createError(new IllegalArgumentException( - "invalid declaration: cannot resolve type [" + canonicalTypeName + "]")); + throw userDeclarationNode.createError( + new IllegalArgumentException("invalid declaration: cannot resolve type [" + canonicalTypeName + "]") + ); } AExpression userValueNode = userDeclarationNode.getValueNode(); @@ -722,9 +765,17 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor type = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); if (type == null) { - throw userCatchNode.createError(new IllegalArgumentException( - "invalid catch declaration: cannot resolve type [" + canonicalTypeName + "]")); + throw userCatchNode.createError( + new IllegalArgumentException("invalid catch declaration: cannot resolve type [" + canonicalTypeName + "]") + ); } Location location = userCatchNode.getLocation(); @@ -854,9 +907,16 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor baseException = userCatchNode.getBaseException(); if (userCatchNode.getBaseException().isAssignableFrom(type) == false) { - throw userCatchNode.createError(new ClassCastException( - "cannot cast from [" + PainlessLookupUtility.typeToCanonicalTypeName(type) + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(baseException) + "]")); + throw userCatchNode.createError( + new ClassCastException( + "cannot cast from [" + + PainlessLookupUtility.typeToCanonicalTypeName(type) + + "] " + + "to [" + + PainlessLookupUtility.typeToCanonicalTypeName(baseException) + + "]" + ) + ); } SBlock userBlockNode = userCatchNode.getBlockNode(); @@ -986,8 +1046,18 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).getValueType(); if (rightValueType == void.class) { - throw userAssignmentNode.createError(new IllegalArgumentException( - "invalid assignment: cannot assign type [" + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + "]")); + throw userAssignmentNode.createError( + new IllegalArgumentException( + "invalid assignment: cannot assign type [" + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + "]" + ) + ); } semanticScope.putDecoration(userLeftNode, new ValueType(rightValueType)); @@ -1040,8 +1113,10 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor valueType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); @@ -1363,13 +1503,19 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor instanceType = 
semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); @@ -1393,13 +1539,15 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor promote = AnalyzerCaster.promoteConditional(leftValueType, rightValueType); if (promote == null) { - throw userConditionalNode.createError(new ClassCastException("cannot apply the conditional operator [?:] to the types " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(leftValueType) + "] and " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(rightValueType) + "]")); + throw userConditionalNode.createError( + new ClassCastException( + "cannot apply the conditional operator [?:] to the types " + + "[" + + PainlessLookupUtility.typeToCanonicalTypeName(leftValueType) + + "] and " + + "[" + + PainlessLookupUtility.typeToCanonicalTypeName(rightValueType) + + "]" + ) + ); } semanticScope.putDecoration(userTrueNode, new TargetType(promote)); @@ -1456,8 +1612,9 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor/0] not found")); + throw userListInitNode.createError( + new IllegalArgumentException("constructor [" + typeToCanonicalTypeName(valueType) + ", /0] not found") + ); } semanticScope.putDecoration(userListInitNode, new StandardPainlessConstructor(constructor)); @@ -1550,8 +1708,9 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor/0] not found")); + throw userMapInitNode.createError( + new IllegalArgumentException("constructor [" + typeToCanonicalTypeName(valueType) + ", /0] not found") + ); } semanticScope.putDecoration(userMapInitNode, new StandardPainlessConstructor(constructor)); @@ -1596,8 +1755,9 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor userArgumentNodes = userNewObjNode.getArgumentNodes(); int userArgumentsSize = userArgumentNodes.size(); if (semanticScope.getCondition(userNewObjNode, Write.class)) { - throw userNewObjNode.createError(new IllegalArgumentException( - "invalid assignment cannot assign a value to new object with constructor " + - "[" + canonicalTypeName + "/" + userArgumentsSize + "]")); + throw userNewObjNode.createError( + new IllegalArgumentException( + "invalid assignment cannot assign a value to new object with constructor " + + "[" + + canonicalTypeName + + "/" + + userArgumentsSize + + "]" + ) + ); } ScriptScope scriptScope = semanticScope.getScriptScope(); @@ -1688,8 +1857,11 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor/" + userArgumentsSize + "] not found")); + throw userNewObjNode.createError( + new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(valueType) + ", /" + userArgumentsSize + "] not found" + ) + ); } scriptScope.putDecoration(userNewObjNode, new StandardPainlessConstructor(constructor)); @@ -1699,9 +1871,18 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) { - constant = (byte)integer; + constant = (byte) integer; valueType = byte.class; } else if (sort == char.class && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) { - constant = (char)integer; + constant = (char) integer; valueType = char.class; } else if (sort == short.class && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) { - constant = (short)integer; + constant = (short) integer; valueType = short.class; } else { constant = integer; @@ -1935,8 +2125,11 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor valueType; - if (symbol.equals("this") || type != 
null) { + if (symbol.equals("this") || type != null) { if (semanticScope.getCondition(userFunctionRefNode, Write.class)) { - throw userFunctionRefNode.createError(new IllegalArgumentException( - "invalid assignment: cannot assign a value to function reference [" + symbol + ":" + methodName + "]")); + throw userFunctionRefNode.createError( + new IllegalArgumentException( + "invalid assignment: cannot assign a value to function reference [" + symbol + ":" + methodName + "]" + ) + ); } if (read == false) { - throw userFunctionRefNode.createError(new IllegalArgumentException( - "not a statement: function reference [" + symbol + ":" + methodName + "] not used")); + throw userFunctionRefNode.createError( + new IllegalArgumentException("not a statement: function reference [" + symbol + ":" + methodName + "] not used") + ); } if (targetType == null) { @@ -2294,21 +2519,34 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor staticType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(symbol); - if (staticType != null) { + if (staticType != null) { if (write) { - throw userSymbolNode.createError(new IllegalArgumentException("invalid assignment: " + - "cannot write a value to a static type [" + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + "]")); + throw userSymbolNode.createError( + new IllegalArgumentException( + "invalid assignment: " + + "cannot write a value to a static type [" + + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + + "]" + ) + ); } if (read == false) { - throw userSymbolNode.createError(new IllegalArgumentException("not a statement: " + - "static type [" + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + "] not used")); + throw userSymbolNode.createError( + new IllegalArgumentException( + "not a statement: " + "static type [" + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + "] not used" + ) + ); } semanticScope.putDecoration(userSymbolNode, new StaticType(staticType)); @@ -2456,32 +2723,54 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor staticType = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); if (staticType == null) { semanticScope.putDecoration(userDotNode, new PartialCanonicalTypeName(canonicalTypeName)); } else { if (write) { - throw userDotNode.createError(new IllegalArgumentException("invalid assignment: " + - "cannot write a value to a static type [" + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + "]")); + throw userDotNode.createError( + new IllegalArgumentException( + "invalid assignment: " + + "cannot write a value to a static type [" + + PainlessLookupUtility.typeToCanonicalTypeName(staticType) + + "]" + ) + ); } semanticScope.putDecoration(userDotNode, new StaticType(staticType)); @@ -2496,8 +2785,14 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor valueType; @@ -2890,35 +3279,59 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor argumentValueType = semanticScope.getDecoration(userArgumentNode, ValueType.class).getValueType(); if (argumentValueType == void.class) { - throw userCallNode.createError(new IllegalArgumentException( - "Argument(s) cannot be of [void] type when calling method [" + methodName + "].")); + throw userCallNode.createError( + new IllegalArgumentException("Argument(s) cannot be of [void] type when calling method [" + methodName + "].") + ); } } TargetType targetType = userCallNode.isNullSafe() ? 
null : semanticScope.getDecoration(userCallNode, TargetType.class); // TODO: remove ZonedDateTime exception when JodaCompatibleDateTime is removed - valueType = targetType == null || targetType.getTargetType() == ZonedDateTime.class || - semanticScope.getCondition(userCallNode, Explicit.class) ? def.class : targetType.getTargetType(); + valueType = targetType == null + || targetType.getTargetType() == ZonedDateTime.class + || semanticScope.getCondition(userCallNode, Explicit.class) ? def.class : targetType.getTargetType(); } else { PainlessMethod method; if (prefixValueType != null) { - method = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessMethod( - prefixValueType.getValueType(), false, methodName, userArgumentsSize); + method = semanticScope.getScriptScope() + .getPainlessLookup() + .lookupPainlessMethod(prefixValueType.getValueType(), false, methodName, userArgumentsSize); if (method == null) { - throw userCallNode.createError(new IllegalArgumentException("member method " + - "[" + prefixValueType.getValueCanonicalTypeName() + ", " + methodName + "/" + userArgumentsSize + "] " + - "not found")); + throw userCallNode.createError( + new IllegalArgumentException( + "member method " + + "[" + + prefixValueType.getValueCanonicalTypeName() + + ", " + + methodName + + "/" + + userArgumentsSize + + "] " + + "not found" + ) + ); } } else if (prefixStaticType != null) { - method = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessMethod( - prefixStaticType.getStaticType(), true, methodName, userArgumentsSize); + method = semanticScope.getScriptScope() + .getPainlessLookup() + .lookupPainlessMethod(prefixStaticType.getStaticType(), true, methodName, userArgumentsSize); if (method == null) { - throw userCallNode.createError(new IllegalArgumentException("static method " + - "[" + prefixStaticType.getStaticCanonicalTypeName() + ", " + methodName + "/" + userArgumentsSize + "] " + - "not found")); + throw userCallNode.createError( + new IllegalArgumentException( + "static method " + + "[" + + prefixStaticType.getStaticCanonicalTypeName() + + ", " + + methodName + + "/" + + userArgumentsSize + + "] " + + "not found" + ) + ); } } else { throw userCallNode.createError(new IllegalStateException("value required: instead found no value")); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultSemanticHeaderPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultSemanticHeaderPhase.java index 6beffa69479..13f190adf99 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultSemanticHeaderPhase.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultSemanticHeaderPhase.java @@ -58,17 +58,28 @@ public class DefaultSemanticHeaderPhase extends UserTreeBaseVisitor int parameterCount = canonicalTypeNameParameters.size(); if (parameterCount != parameterNames.size()) { - throw userFunctionNode.createError(new IllegalStateException("invalid function definition: " + - "parameter types size [" + canonicalTypeNameParameters.size() + "] is not equal to " + - "parameter names size [" + parameterNames.size() + "] for function [" + functionName +"]")); + throw userFunctionNode.createError( + new IllegalStateException( + "invalid function definition: " + + "parameter types size [" + + canonicalTypeNameParameters.size() + + "] is not equal to " + + "parameter names size [" + + parameterNames.size() + + "] for function [" + + functionName + + "]" + ) + ); } FunctionTable 
functionTable = scriptScope.getFunctionTable(); String functionKey = FunctionTable.buildLocalFunctionKey(functionName, canonicalTypeNameParameters.size()); if (functionTable.getFunction(functionKey) != null) { - throw userFunctionNode.createError(new IllegalArgumentException("invalid function definition: " + - "found duplicate function [" + functionKey + "].")); + throw userFunctionNode.createError( + new IllegalArgumentException("invalid function definition: " + "found duplicate function [" + functionKey + "].") + ); } PainlessLookup painlessLookup = scriptScope.getPainlessLookup(); @@ -76,8 +87,16 @@ public class DefaultSemanticHeaderPhase extends UserTreeBaseVisitor Class returnType = painlessLookup.canonicalTypeNameToType(returnCanonicalTypeName); if (returnType == null) { - throw userFunctionNode.createError(new IllegalArgumentException("invalid function definition: " + - "return type [" + returnCanonicalTypeName + "] not found for function [" + functionKey + "]")); + throw userFunctionNode.createError( + new IllegalArgumentException( + "invalid function definition: " + + "return type [" + + returnCanonicalTypeName + + "] not found for function [" + + functionKey + + "]" + ) + ); } List> typeParameters = new ArrayList<>(); @@ -86,8 +105,16 @@ public class DefaultSemanticHeaderPhase extends UserTreeBaseVisitor Class paramType = painlessLookup.canonicalTypeNameToType(typeParameter); if (paramType == null) { - throw userFunctionNode.createError(new IllegalArgumentException("invalid function definition: " + - "parameter type [" + typeParameter + "] not found for function [" + functionKey + "]")); + throw userFunctionNode.createError( + new IllegalArgumentException( + "invalid function definition: " + + "parameter type [" + + typeParameter + + "] not found for function [" + + functionKey + + "]" + ) + ); } typeParameters.add(paramType); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultStringConcatenationOptimizationPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultStringConcatenationOptimizationPhase.java index 5a6c86e3f59..01127eb7b36 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultStringConcatenationOptimizationPhase.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultStringConcatenationOptimizationPhase.java @@ -39,14 +39,14 @@ public class DefaultStringConcatenationOptimizationPhase extends IRTreeBaseVisit @Override public void visitStringConcatenation(StringConcatenationNode irStringConcatenationNode, Void scope) { - int i = 0; + int i = 0; while (i < irStringConcatenationNode.getArgumentNodes().size()) { ExpressionNode irArgumentNode = irStringConcatenationNode.getArgumentNodes().get(i); if (irArgumentNode instanceof StringConcatenationNode) { irStringConcatenationNode.getArgumentNodes().remove(i); - irStringConcatenationNode.getArgumentNodes().addAll(i, ((StringConcatenationNode)irArgumentNode).getArgumentNodes()); + irStringConcatenationNode.getArgumentNodes().addAll(i, ((StringConcatenationNode) irArgumentNode).getArgumentNodes()); } else { i++; } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultUserTreeToIRTreePhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultUserTreeToIRTreePhase.java index 613a5cb890f..aa546ab44f8 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultUserTreeToIRTreePhase.java +++ 
b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/DefaultUserTreeToIRTreePhase.java @@ -269,10 +269,10 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor returnType = localFunction.getReturnType(); boolean methodEscape = scriptScope.getCondition(userFunctionNode, MethodEscape.class); - BlockNode irBlockNode = (BlockNode)visit(userFunctionNode.getBlockNode(), scriptScope); + BlockNode irBlockNode = (BlockNode) visit(userFunctionNode.getBlockNode(), scriptScope); if (methodEscape == false) { ExpressionNode irExpressionNode; @@ -550,19 +561,19 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor iterableValueType = scriptScope.getDecoration(userEachNode.getIterableNode(), ValueType.class).getValueType(); - BlockNode irBlockNode = (BlockNode)visit(userEachNode.getBlockNode(), scriptScope); + BlockNode irBlockNode = (BlockNode) visit(userEachNode.getBlockNode(), scriptScope); ConditionNode irConditionNode; @@ -691,8 +703,11 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor compoundType = scriptScope.hasDecoration(userAssignmentNode, CompoundType.class) ? - scriptScope.getDecoration(userAssignmentNode, CompoundType.class).getCompoundType() : null; + Class compoundType = scriptScope.hasDecoration(userAssignmentNode, CompoundType.class) + ? scriptScope.getDecoration(userAssignmentNode, CompoundType.class).getCompoundType() + : null; ExpressionNode irAssignmentNode; // add a cast node if necessary for the value node for the assignment @@ -815,7 +831,7 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor unaryType = scriptScope.hasDecoration(userUnaryNode, UnaryType.class) ? - scriptScope.getDecoration(userUnaryNode, UnaryType.class).getUnaryType() : null; + Class unaryType = scriptScope.hasDecoration(userUnaryNode, UnaryType.class) + ? scriptScope.getDecoration(userUnaryNode, UnaryType.class).getUnaryType() + : null; IRNode irNode; @@ -961,12 +980,13 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor shiftType = scriptScope.hasDecoration(userBinaryNode, ShiftType.class) ? - scriptScope.getDecoration(userBinaryNode, ShiftType.class).getShiftType() : null; + Class shiftType = scriptScope.hasDecoration(userBinaryNode, ShiftType.class) + ? 
scriptScope.getDecoration(userBinaryNode, ShiftType.class).getShiftType() + : null; BinaryMathNode irBinaryMathNode = new BinaryMathNode(userBinaryNode.getLocation()); @@ -1023,7 +1043,7 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor valueType = scriptScope.getDecoration(userDotNode, ValueType.class).getValueType(); ValueType prefixValueType = scriptScope.getDecoration(userDotNode.getPrefixNode(), ValueType.class); - ExpressionNode irPrefixNode = (ExpressionNode)visit(userDotNode.getPrefixNode(), scriptScope); + ExpressionNode irPrefixNode = (ExpressionNode) visit(userDotNode.getPrefixNode(), scriptScope); ExpressionNode irIndexNode = null; StoreNode irStoreNode = null; ExpressionNode irLoadNode = null; @@ -1511,8 +1548,8 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor valueType = scriptScope.getDecoration(userBraceNode, ValueType.class).getValueType(); Class prefixValueType = scriptScope.getDecoration(userBraceNode.getPrefixNode(), ValueType.class).getValueType(); - ExpressionNode irPrefixNode = (ExpressionNode)visit(userBraceNode.getPrefixNode(), scriptScope); + ExpressionNode irPrefixNode = (ExpressionNode) visit(userBraceNode.getPrefixNode(), scriptScope); ExpressionNode irIndexNode = injectCast(userBraceNode.getIndexNode(), scriptScope); StoreNode irStoreNode = null; ExpressionNode irLoadNode = null; @@ -1672,7 +1722,8 @@ public class DefaultUserTreeToIRTreePhase implements UserTreeVisitor { void visitClass(ClassNode irClassNode, Scope scope); + void visitFunction(FunctionNode irFunctionNode, Scope scope); + void visitField(FieldNode irFieldNode, Scope scope); void visitBlock(BlockNode irBlockNode, Scope scope); + void visitIf(IfNode irIfNode, Scope scope); + void visitIfElse(IfElseNode irIfElseNode, Scope scope); + void visitWhileLoop(WhileLoopNode irWhileLoopNode, Scope scope); + void visitDoWhileLoop(DoWhileLoopNode irDoWhileLoopNode, Scope scope); + void visitForLoop(ForLoopNode irForLoopNode, Scope scope); + void visitForEachLoop(ForEachLoopNode irForEachLoopNode, Scope scope); + void visitForEachSubArrayLoop(ForEachSubArrayNode irForEachSubArrayNode, Scope scope); + void visitForEachSubIterableLoop(ForEachSubIterableNode irForEachSubIterableNode, Scope scope); + void visitDeclarationBlock(DeclarationBlockNode irDeclarationBlockNode, Scope scope); + void visitDeclaration(DeclarationNode irDeclarationNode, Scope scope); + void visitReturn(ReturnNode irReturnNode, Scope scope); + void visitStatementExpression(StatementExpressionNode irStatementExpressionNode, Scope scope); + void visitTry(TryNode irTryNode, Scope scope); + void visitCatch(CatchNode irCatchNode, Scope scope); + void visitThrow(ThrowNode irThrowNode, Scope scope); + void visitContinue(ContinueNode irContinueNode, Scope scope); + void visitBreak(BreakNode irBreakNode, Scope scope); void visitBinaryImpl(BinaryImplNode irBinaryImplNode, Scope scope); + void visitUnaryMath(UnaryMathNode irUnaryMathNode, Scope scope); + void visitBinaryMath(BinaryMathNode irBinaryMathNode, Scope scope); + void visitStringConcatenation(StringConcatenationNode irStringConcatenationNode, Scope scope); + void visitBoolean(BooleanNode irBooleanNode, Scope scope); + void visitComparison(ComparisonNode irComparisonNode, Scope scope); + void visitCast(CastNode irCastNode, Scope scope); + void visitInstanceof(InstanceofNode irInstanceofNode, Scope scope); + void visitConditional(ConditionalNode irConditionalNode, Scope scope); + void visitElvis(ElvisNode irElvisNode, Scope scope); + void 
visitListInitialization(ListInitializationNode irListInitializationNode, Scope scope); + void visitMapInitialization(MapInitializationNode irMapInitializationNode, Scope scope); + void visitNewArray(NewArrayNode irNewArrayNode, Scope scope); + void visitNewObject(NewObjectNode irNewObjectNode, Scope scope); + void visitConstant(ConstantNode irConstantNode, Scope scope); + void visitNull(NullNode irNullNode, Scope scope); + void visitDefInterfaceReference(DefInterfaceReferenceNode irDefInterfaceReferenceNode, Scope scope); + void visitTypedInterfaceReference(TypedInterfaceReferenceNode irTypedInterfaceReferenceNode, Scope scope); + void visitTypeCaptureReference(TypedCaptureReferenceNode irTypedCaptureReferenceNode, Scope scope); + void visitStatic(StaticNode irStaticNode, Scope scope); + void visitLoadVariable(LoadVariableNode irLoadVariableNode, Scope scope); + void visitNullSafeSub(NullSafeSubNode irNullSafeSubNode, Scope scope); + void visitLoadDotArrayLengthNode(LoadDotArrayLengthNode irLoadDotArrayLengthNode, Scope scope); + void visitLoadDotDef(LoadDotDefNode irLoadDotDefNode, Scope scope); + void visitLoadDot(LoadDotNode irLoadDotNode, Scope scope); + void visitLoadDotShortcut(LoadDotShortcutNode irDotSubShortcutNode, Scope scope); + void visitLoadListShortcut(LoadListShortcutNode irLoadListShortcutNode, Scope scope); + void visitLoadMapShortcut(LoadMapShortcutNode irLoadMapShortcutNode, Scope scope); + void visitLoadFieldMember(LoadFieldMemberNode irLoadFieldMemberNode, Scope scope); + void visitLoadBraceDef(LoadBraceDefNode irLoadBraceDefNode, Scope scope); + void visitLoadBrace(LoadBraceNode irLoadBraceNode, Scope scope); + void visitStoreVariable(StoreVariableNode irStoreVariableNode, Scope scope); + void visitStoreDotDef(StoreDotDefNode irStoreDotDefNode, Scope scope); + void visitStoreDot(StoreDotNode irStoreDotNode, Scope scope); + void visitStoreDotShortcut(StoreDotShortcutNode irDotSubShortcutNode, Scope scope); + void visitStoreListShortcut(StoreListShortcutNode irStoreListShortcutNode, Scope scope); + void visitStoreMapShortcut(StoreMapShortcutNode irStoreMapShortcutNode, Scope scope); + void visitStoreFieldMember(StoreFieldMemberNode irStoreFieldMemberNode, Scope scope); + void visitStoreBraceDef(StoreBraceDefNode irStoreBraceDefNode, Scope scope); + void visitStoreBrace(StoreBraceNode irStoreBraceNode, Scope scope); + void visitInvokeCallDef(InvokeCallDefNode irInvokeCallDefNode, Scope scope); + void visitInvokeCall(InvokeCallNode irInvokeCallNode, Scope scope); + void visitInvokeCallMember(InvokeCallMemberNode irInvokeCallMemberNode, Scope scope); + void visitFlipArrayIndex(FlipArrayIndexNode irFlipArrayIndexNode, Scope scope); + void visitFlipCollectionIndex(FlipCollectionIndexNode irFlipCollectionIndexNode, Scope scope); + void visitFlipDefIndex(FlipDefIndexNode irFlipDefIndexNode, Scope scope); + void visitDup(DupNode irDupNode, Scope scope); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java index bb836742e4f..04165f44ba2 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticAnalysisPhase.java @@ -73,8 +73,8 @@ public class PainlessSemanticAnalysisPhase extends DefaultSemanticAnalysisPhase if ("execute".equals(functionName)) { ScriptClassInfo 
scriptClassInfo = scriptScope.getScriptClassInfo(); - LocalFunction localFunction = - scriptScope.getFunctionTable().getFunction(functionName, scriptClassInfo.getExecuteArguments().size()); + LocalFunction localFunction = scriptScope.getFunctionTable() + .getFunction(functionName, scriptClassInfo.getExecuteArguments().size()); List> typeParameters = localFunction.getTypeParameters(); FunctionScope functionScope = newFunctionScope(scriptScope, localFunction.getReturnType()); @@ -95,9 +95,17 @@ public class PainlessSemanticAnalysisPhase extends DefaultSemanticAnalysisPhase SBlock userBlockNode = userFunctionNode.getBlockNode(); if (userBlockNode.getStatementNodes().isEmpty()) { - throw userFunctionNode.createError(new IllegalArgumentException("invalid function definition: " + - "found no statements for function " + - "[" + functionName + "] with [" + typeParameters.size() + "] parameters")); + throw userFunctionNode.createError( + new IllegalArgumentException( + "invalid function definition: " + + "found no statements for function " + + "[" + + functionName + + "] with [" + + typeParameters.size() + + "] parameters" + ) + ); } functionScope.setCondition(userBlockNode, LastSource.class); @@ -142,8 +150,12 @@ public class PainlessSemanticAnalysisPhase extends DefaultSemanticAnalysisPhase semanticScope.putDecoration(userStatementNode, new TargetType(rtnType)); semanticScope.setCondition(userStatementNode, Internal.class); if ("execute".equals(functionName)) { - decorateWithCastForReturn(userStatementNode, userExpressionNode, semanticScope, - semanticScope.getScriptScope().getScriptClassInfo()); + decorateWithCastForReturn( + userStatementNode, + userExpressionNode, + semanticScope, + semanticScope.getScriptScope().getScriptClassInfo() + ); } else { decorateWithCast(userStatementNode, semanticScope); } @@ -167,9 +179,17 @@ public class PainlessSemanticAnalysisPhase extends DefaultSemanticAnalysisPhase if (userValueNode == null) { if (semanticScope.getReturnType() != void.class) { - throw userReturnNode.createError(new ClassCastException("cannot cast from " + - "[" + semanticScope.getReturnCanonicalTypeName() + "] to " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + "]")); + throw userReturnNode.createError( + new ClassCastException( + "cannot cast from " + + "[" + + semanticScope.getReturnCanonicalTypeName() + + "] to " + + "[" + + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + + "]" + ) + ); } } else { semanticScope.setCondition(userValueNode, Read.class); @@ -177,8 +197,12 @@ public class PainlessSemanticAnalysisPhase extends DefaultSemanticAnalysisPhase semanticScope.setCondition(userValueNode, Internal.class); checkedVisit(userValueNode, semanticScope); if ("execute".equals(functionName)) { - decorateWithCastForReturn(userValueNode, userReturnNode, semanticScope, - semanticScope.getScriptScope().getScriptClassInfo()); + decorateWithCastForReturn( + userValueNode, + userReturnNode, + semanticScope, + semanticScope.getScriptScope().getScriptClassInfo() + ); } else { decorateWithCast(userValueNode, semanticScope); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticHeaderPhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticHeaderPhase.java index 66ae34068d7..dd1300116b5 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticHeaderPhase.java +++ 
b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessSemanticHeaderPhase.java @@ -54,8 +54,9 @@ public class PainlessSemanticHeaderPhase extends DefaultSemanticHeaderPhase { String functionKey = FunctionTable.buildLocalFunctionKey(functionName, scriptClassInfo.getExecuteArguments().size()); if (functionTable.getFunction(functionKey) != null) { - throw userFunctionNode.createError(new IllegalArgumentException("invalid function definition: " + - "found duplicate function [" + functionKey + "].")); + throw userFunctionNode.createError( + new IllegalArgumentException("invalid function definition: " + "found duplicate function [" + functionKey + "].") + ); } Class returnType = scriptClassInfo.getExecuteMethodReturnType(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessUserTreeToIRTreePhase.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessUserTreeToIRTreePhase.java index ecea866c9e4..a25b9601c0f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessUserTreeToIRTreePhase.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/PainlessUserTreeToIRTreePhase.java @@ -90,12 +90,12 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase // the main "execute" block with several exceptions. if ("execute".equals(functionName)) { ScriptClassInfo scriptClassInfo = scriptScope.getScriptClassInfo(); - LocalFunction localFunction = - scriptScope.getFunctionTable().getFunction(functionName, scriptClassInfo.getExecuteArguments().size()); + LocalFunction localFunction = scriptScope.getFunctionTable() + .getFunction(functionName, scriptClassInfo.getExecuteArguments().size()); Class returnType = localFunction.getReturnType(); boolean methodEscape = scriptScope.getCondition(userFunctionNode, MethodEscape.class); - BlockNode irBlockNode = (BlockNode)visit(userFunctionNode.getBlockNode(), scriptScope); + BlockNode irBlockNode = (BlockNode) visit(userFunctionNode.getBlockNode(), scriptScope); if (methodEscape == false) { ExpressionNode irExpressionNode; @@ -110,19 +110,19 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase if (returnType == boolean.class) { irConstantNode.setConstant(false); } else if (returnType == byte.class - || returnType == char.class - || returnType == short.class - || returnType == int.class) { - irConstantNode.setConstant(0); - } else if (returnType == long.class) { - irConstantNode.setConstant(0L); - } else if (returnType == float.class) { - irConstantNode.setConstant(0f); - } else if (returnType == double.class) { - irConstantNode.setConstant(0d); - } else { - throw userFunctionNode.createError(new IllegalStateException("illegal tree structure")); - } + || returnType == char.class + || returnType == short.class + || returnType == int.class) { + irConstantNode.setConstant(0); + } else if (returnType == long.class) { + irConstantNode.setConstant(0L); + } else if (returnType == float.class) { + irConstantNode.setConstant(0f); + } else if (returnType == double.class) { + irConstantNode.setConstant(0d); + } else { + throw userFunctionNode.createError(new IllegalStateException("illegal tree structure")); + } irExpressionNode = irConstantNode; } else { @@ -292,8 +292,9 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase InvokeCallMemberNode irInvokeCallMemberNode = new InvokeCallMemberNode(internalLocation); 
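One step back in this file: the methodEscape == false branch seen just above injects a ConstantNode carrying the default value for the execute method's return type. A compact restatement of that selection as an invented helper (defaultValueFor is not part of the Painless sources); reference return types are handled by the enclosing else branch rather than by this switch.

// Invented helper, not in the Painless sources: the default value that the
// phase stores on the injected ConstantNode when execute() can fall through.
final class DefaultReturnSketch {
    static Object defaultValueFor(Class<?> returnType) {
        if (returnType == boolean.class) return false;
        if (returnType == byte.class || returnType == char.class
                || returnType == short.class || returnType == int.class) return 0;
        if (returnType == long.class) return 0L;
        if (returnType == float.class) return 0f;
        if (returnType == double.class) return 0d;
        throw new IllegalStateException("illegal tree structure");
    }
}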
irInvokeCallMemberNode.setExpressionType(irDeclarationNode.getDeclarationType()); - irInvokeCallMemberNode.setLocalFunction(new LocalFunction( - getMethod.getName(), returnType, Collections.emptyList(), true, false)); + irInvokeCallMemberNode.setLocalFunction( + new LocalFunction(getMethod.getName(), returnType, Collections.emptyList(), true, false) + ); irDeclarationNode.setExpressionNode(irInvokeCallMemberNode); } } @@ -338,11 +339,11 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase // decorate the execute method with nodes to wrap the user statements with // the sandboxed errors as follows: // } catch (PainlessExplainError e) { - // throw this.convertToScriptException(e, e.getHeaders($DEFINITION)) + // throw this.convertToScriptException(e, e.getHeaders($DEFINITION)) // } // and // } catch (PainlessError | BootstrapMethodError | OutOfMemoryError | StackOverflowError | Exception e) { - // throw this.convertToScriptException(e, e.getHeaders()) + // throw this.convertToScriptException(e, e.getHeaders()) // } protected void injectSandboxExceptions(FunctionNode irFunctionNode) { try { @@ -370,13 +371,7 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase InvokeCallMemberNode irInvokeCallMemberNode = new InvokeCallMemberNode(internalLocation); irInvokeCallMemberNode.setExpressionType(ScriptException.class); irInvokeCallMemberNode.setLocalFunction( - new LocalFunction( - "convertToScriptException", - ScriptException.class, - Arrays.asList(Throwable.class, Map.class), - true, - false - ) + new LocalFunction("convertToScriptException", ScriptException.class, Arrays.asList(Throwable.class, Map.class), true, false) ); irThrowNode.setExpressionNode(irInvokeCallMemberNode); @@ -402,17 +397,15 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase irInvokeCallNode.setExpressionType(Map.class); irInvokeCallNode.setBox(PainlessExplainError.class); irInvokeCallNode.setMethod( - new PainlessMethod( - PainlessExplainError.class.getMethod( - "getHeaders", - PainlessLookup.class), - PainlessExplainError.class, - null, - Collections.emptyList(), - null, - null, - null - ) + new PainlessMethod( + PainlessExplainError.class.getMethod("getHeaders", PainlessLookup.class), + PainlessExplainError.class, + null, + Collections.emptyList(), + null, + null, + null + ) ); irBinaryImplNode.setRightNode(irInvokeCallNode); @@ -425,7 +418,11 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase irInvokeCallNode.addArgumentNode(irLoadFieldMemberNode); for (Class throwable : new Class[] { - PainlessError.class, BootstrapMethodError.class, OutOfMemoryError.class, StackOverflowError.class, Exception.class}) { + PainlessError.class, + BootstrapMethodError.class, + OutOfMemoryError.class, + StackOverflowError.class, + Exception.class }) { String name = throwable.getSimpleName(); name = "#" + Character.toLowerCase(name.charAt(0)) + name.substring(1); @@ -448,13 +445,13 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase irInvokeCallMemberNode = new InvokeCallMemberNode(internalLocation); irInvokeCallMemberNode.setExpressionType(ScriptException.class); irInvokeCallMemberNode.setLocalFunction( - new LocalFunction( - "convertToScriptException", - ScriptException.class, - Arrays.asList(Throwable.class, Map.class), - true, - false - ) + new LocalFunction( + "convertToScriptException", + ScriptException.class, + Arrays.asList(Throwable.class, Map.class), + true, + false + ) ); 
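The injectSandboxExceptions hunks here build IR for the try/catch wrapper described by the comments above. Below is a self-contained sketch of that shape; ExplainError and convertToScriptException are simplified placeholders for the real PainlessExplainError and ScriptException plumbing, not the actual generated bytecode.

// Stand-ins only: ExplainError and convertToScriptException stand in for
// PainlessExplainError and the generated script's ScriptException conversion.
import java.util.Collections;
import java.util.Map;

final class SandboxWrapperSketch {

    static final class ExplainError extends Error {
        Map<String, Object> getHeaders() {
            return Collections.singletonMap("painless.explain", (Object) "...");
        }
    }

    static RuntimeException convertToScriptException(Throwable cause, Map<String, Object> headers) {
        return new RuntimeException("script exception, headers=" + headers, cause);
    }

    // The shape injectSandboxExceptions wraps around the user's statements.
    static void executeSandboxed(Runnable userStatements) {
        try {
            userStatements.run();
        } catch (ExplainError e) {
            throw convertToScriptException(e, e.getHeaders());
        } catch (StackOverflowError | Exception e) {
            // the real phase also catches PainlessError, BootstrapMethodError and OutOfMemoryError
            throw convertToScriptException(e, Collections.emptyMap());
        }
    }
}

In the phase itself, the headers for the second handler come from Collections.emptyMap(), built through the PainlessMethod/InvokeCallNode plumbing shown just below.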
irThrowNode.setExpressionNode(irInvokeCallMemberNode); @@ -479,15 +476,15 @@ public class PainlessUserTreeToIRTreePhase extends DefaultUserTreeToIRTreePhase irInvokeCallNode.setExpressionType(Map.class); irInvokeCallNode.setBox(Collections.class); irInvokeCallNode.setMethod( - new PainlessMethod( - Collections.class.getMethod("emptyMap"), - Collections.class, - null, - Collections.emptyList(), - null, - null, - null - ) + new PainlessMethod( + Collections.class.getMethod("emptyMap"), + Collections.class, + null, + Collections.emptyList(), + null, + null, + null + ) ); irBinaryImplNode.setRightNode(irInvokeCallNode); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/UserTreeVisitor.java b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/UserTreeVisitor.java index 5081cdee0b1..a945364ee35 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/phase/UserTreeVisitor.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/phase/UserTreeVisitor.java @@ -81,50 +81,92 @@ import org.opensearch.painless.node.SWhile; public interface UserTreeVisitor { void visitClass(SClass userClassNode, Scope scope); + void visitFunction(SFunction userFunctionNode, Scope scope); void visitBlock(SBlock userBlockNode, Scope scope); + void visitIf(SIf userIfNode, Scope scope); + void visitIfElse(SIfElse userIfElseNode, Scope scope); + void visitWhile(SWhile userWhileNode, Scope scope); + void visitDo(SDo userDoNode, Scope scope); + void visitFor(SFor userForNode, Scope scope); + void visitEach(SEach userEachNode, Scope scope); + void visitDeclBlock(SDeclBlock userDeclBlockNode, Scope scope); + void visitDeclaration(SDeclaration userDeclarationNode, Scope scope); + void visitReturn(SReturn userReturnNode, Scope scope); + void visitExpression(SExpression userExpressionNode, Scope scope); + void visitTry(STry userTryNode, Scope scope); + void visitCatch(SCatch userCatchNode, Scope scope); + void visitThrow(SThrow userThrowNode, Scope scope); + void visitContinue(SContinue userContinueNode, Scope scope); + void visitBreak(SBreak userBreakNode, Scope scope); void visitAssignment(EAssignment userAssignmentNode, Scope scope); + void visitUnary(EUnary userUnaryNode, Scope scope); + void visitBinary(EBinary userBinaryNode, Scope scope); + void visitBooleanComp(EBooleanComp userBooleanCompNode, Scope scope); + void visitComp(EComp userCompNode, Scope scope); + void visitExplicit(EExplicit userExplicitNode, Scope scope); + void visitInstanceof(EInstanceof userInstanceofNode, Scope scope); + void visitConditional(EConditional userConditionalNode, Scope scope); + void visitElvis(EElvis userElvisNode, Scope scope); + void visitListInit(EListInit userListInitNode, Scope scope); + void visitMapInit(EMapInit userMapInitNode, Scope scope); + void visitNewArray(ENewArray userNewArrayNode, Scope scope); + void visitNewObj(ENewObj userNewObjectNode, Scope scope); + void visitCallLocal(ECallLocal userCallLocalNode, Scope scope); + void visitBooleanConstant(EBooleanConstant userBooleanConstantNode, Scope scope); + void visitNumeric(ENumeric userNumericNode, Scope scope); + void visitDecimal(EDecimal userDecimalNode, Scope scope); + void visitString(EString userStringNode, Scope scope); + void visitNull(ENull userNullNode, Scope scope); + void visitRegex(ERegex userRegexNode, Scope scope); + void visitLambda(ELambda userLambdaNode, Scope scope); + void visitFunctionRef(EFunctionRef userFunctionRefNode, Scope scope); + void 
visitNewArrayFunctionRef(ENewArrayFunctionRef userNewArrayFunctionRefNode, Scope scope); + void visitSymbol(ESymbol userSymbolNode, Scope scope); + void visitDot(EDot userDotNode, Scope scope); + void visitBrace(EBrace userBraceNode, Scope scope); + void visitCall(ECall userCallNode, Scope scope); } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorations.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorations.java index 654e1518cce..f8db5732b05 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorations.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorations.java @@ -103,7 +103,7 @@ public class Decorations { } - public static class TargetType implements Decoration { + public static class TargetType implements Decoration { private final Class targetType; @@ -616,6 +616,7 @@ public class Decorations { public static class Converter implements Decoration { private final LocalFunction converter; + public Converter(LocalFunction converter) { this.converter = converter; } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorator.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorator.java index 6595d9715b3..4d0ba8c20c3 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorator.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/Decorator.java @@ -63,7 +63,7 @@ public class Decorator { @SuppressWarnings("unchecked") public T put(int identifier, T decoration) { - return (T)decorations.get(identifier).put(decoration.getClass(), decoration); + return (T) decorations.get(identifier).put(decoration.getClass(), decoration); } public T remove(int identifier, Class type) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/FunctionTable.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/FunctionTable.java index 33933cb5ece..58c4143a13f 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/FunctionTable.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/FunctionTable.java @@ -61,7 +61,12 @@ public class FunctionTable { protected final Method asmMethod; public LocalFunction( - String functionName, Class returnType, List> typeParameters, boolean isInternal, boolean isStatic) { + String functionName, + Class returnType, + List> typeParameters, + boolean isInternal, + boolean isStatic + ) { this.functionName = Objects.requireNonNull(functionName); this.returnType = Objects.requireNonNull(returnType); @@ -73,8 +78,10 @@ public class FunctionTable { Class[] javaTypeParameters = typeParameters.stream().map(PainlessLookupUtility::typeToJavaType).toArray(Class[]::new); this.methodType = MethodType.methodType(javaReturnType, javaTypeParameters); - this.asmMethod = new org.objectweb.asm.commons.Method(functionName, - MethodType.methodType(javaReturnType, javaTypeParameters).toMethodDescriptorString()); + this.asmMethod = new org.objectweb.asm.commons.Method( + functionName, + MethodType.methodType(javaReturnType, javaTypeParameters).toMethodDescriptorString() + ); } public String getFunctionName() { @@ -119,7 +126,12 @@ public class FunctionTable { protected Map localFunctions = new HashMap<>(); public LocalFunction addFunction( - String functionName, Class returnType, List> typeParameters, boolean isInternal, boolean isStatic) { + String functionName, + Class 
returnType, + List> typeParameters, + boolean isInternal, + boolean isStatic + ) { String functionKey = buildLocalFunctionKey(functionName, typeParameters.size()); LocalFunction function = new LocalFunction(functionName, returnType, typeParameters, isInternal, isStatic); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/ScriptScope.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/ScriptScope.java index 8c515f526f4..03b43eff2f4 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/ScriptScope.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/ScriptScope.java @@ -64,8 +64,14 @@ public class ScriptScope extends Decorator { protected Set usedVariables = Collections.emptySet(); protected Map staticConstants = new HashMap<>(); - public ScriptScope(PainlessLookup painlessLookup, CompilerSettings compilerSettings, - ScriptClassInfo scriptClassInfo, String scriptName, String scriptSource, int nodeCount) { + public ScriptScope( + PainlessLookup painlessLookup, + CompilerSettings compilerSettings, + ScriptClassInfo scriptClassInfo, + String scriptName, + String scriptSource, + int nodeCount + ) { super(nodeCount); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java index c5d2c4f86e7..e27530d745e 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/symbol/SemanticScope.java @@ -348,6 +348,7 @@ public abstract class SemanticScope { } public abstract Class getReturnType(); + public abstract String getReturnCanonicalTypeName(); public Variable defineVariable(Location location, Class type, String name, boolean isReadOnly) { @@ -362,6 +363,7 @@ public abstract class SemanticScope { } public abstract boolean isVariableDefined(String name); + public abstract Variable getVariable(Location location, String name); public Variable defineInternalVariable(Location location, Class type, String name, boolean isReadOnly) { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AdditionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AdditionTests.java index 6f20554c13b..d73f382c076 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AdditionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AdditionTests.java @@ -33,7 +33,7 @@ package org.opensearch.painless; /** Tests for addition operator across all types */ -//TODO: NaN/Inf/overflow/... +// TODO: NaN/Inf/overflow/... 
public class AdditionTests extends ScriptTestCase { public void testBasics() throws Exception { @@ -41,172 +41,172 @@ public class AdditionTests extends ScriptTestCase { } public void testInt() throws Exception { - assertEquals(1+1, exec("int x = 1; int y = 1; return x+y;")); - assertEquals(1+2, exec("int x = 1; int y = 2; return x+y;")); - assertEquals(5+10, exec("int x = 5; int y = 10; return x+y;")); - assertEquals(1+1+2, exec("int x = 1; int y = 1; int z = 2; return x+y+z;")); - assertEquals((1+1)+2, exec("int x = 1; int y = 1; int z = 2; return (x+y)+z;")); - assertEquals(1+(1+2), exec("int x = 1; int y = 1; int z = 2; return x+(y+z);")); - assertEquals(0+1, exec("int x = 0; int y = 1; return x+y;")); - assertEquals(1+0, exec("int x = 1; int y = 0; return x+y;")); - assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); - assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); + assertEquals(1 + 1, exec("int x = 1; int y = 1; return x+y;")); + assertEquals(1 + 2, exec("int x = 1; int y = 2; return x+y;")); + assertEquals(5 + 10, exec("int x = 5; int y = 10; return x+y;")); + assertEquals(1 + 1 + 2, exec("int x = 1; int y = 1; int z = 2; return x+y+z;")); + assertEquals((1 + 1) + 2, exec("int x = 1; int y = 1; int z = 2; return (x+y)+z;")); + assertEquals(1 + (1 + 2), exec("int x = 1; int y = 1; int z = 2; return x+(y+z);")); + assertEquals(0 + 1, exec("int x = 0; int y = 1; return x+y;")); + assertEquals(1 + 0, exec("int x = 1; int y = 0; return x+y;")); + assertEquals(0 + 0, exec("int x = 0; int y = 0; return x+y;")); + assertEquals(0 + 0, exec("int x = 0; int y = 0; return x+y;")); } public void testIntConst() throws Exception { - assertEquals(1+1, exec("return 1+1;")); - assertEquals(1+2, exec("return 1+2;")); - assertEquals(5+10, exec("return 5+10;")); - assertEquals(1+1+2, exec("return 1+1+2;")); - assertEquals((1+1)+2, exec("return (1+1)+2;")); - assertEquals(1+(1+2), exec("return 1+(1+2);")); - assertEquals(0+1, exec("return 0+1;")); - assertEquals(1+0, exec("return 1+0;")); - assertEquals(0+0, exec("return 0+0;")); + assertEquals(1 + 1, exec("return 1+1;")); + assertEquals(1 + 2, exec("return 1+2;")); + assertEquals(5 + 10, exec("return 5+10;")); + assertEquals(1 + 1 + 2, exec("return 1+1+2;")); + assertEquals((1 + 1) + 2, exec("return (1+1)+2;")); + assertEquals(1 + (1 + 2), exec("return 1+(1+2);")); + assertEquals(0 + 1, exec("return 0+1;")); + assertEquals(1 + 0, exec("return 1+0;")); + assertEquals(0 + 0, exec("return 0+0;")); } public void testByte() throws Exception { - assertEquals((byte)1+(byte)1, exec("byte x = 1; byte y = 1; return x+y;")); - assertEquals((byte)1+(byte)2, exec("byte x = 1; byte y = 2; return x+y;")); - assertEquals((byte)5+(byte)10, exec("byte x = 5; byte y = 10; return x+y;")); - assertEquals((byte)1+(byte)1+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x+y+z;")); - assertEquals(((byte)1+(byte)1)+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x+y)+z;")); - assertEquals((byte)1+((byte)1+(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x+(y+z);")); - assertEquals((byte)0+(byte)1, exec("byte x = 0; byte y = 1; return x+y;")); - assertEquals((byte)1+(byte)0, exec("byte x = 1; byte y = 0; return x+y;")); - assertEquals((byte)0+(byte)0, exec("byte x = 0; byte y = 0; return x+y;")); + assertEquals((byte) 1 + (byte) 1, exec("byte x = 1; byte y = 1; return x+y;")); + assertEquals((byte) 1 + (byte) 2, exec("byte x = 1; byte y = 2; return x+y;")); + assertEquals((byte) 5 + (byte) 10, exec("byte x = 5; byte y = 10; 
return x+y;")); + assertEquals((byte) 1 + (byte) 1 + (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return x+y+z;")); + assertEquals(((byte) 1 + (byte) 1) + (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return (x+y)+z;")); + assertEquals((byte) 1 + ((byte) 1 + (byte) 2), exec("byte x = 1; byte y = 1; byte z = 2; return x+(y+z);")); + assertEquals((byte) 0 + (byte) 1, exec("byte x = 0; byte y = 1; return x+y;")); + assertEquals((byte) 1 + (byte) 0, exec("byte x = 1; byte y = 0; return x+y;")); + assertEquals((byte) 0 + (byte) 0, exec("byte x = 0; byte y = 0; return x+y;")); } public void testByteConst() throws Exception { - assertEquals((byte)1+(byte)1, exec("return (byte)1+(byte)1;")); - assertEquals((byte)1+(byte)2, exec("return (byte)1+(byte)2;")); - assertEquals((byte)5+(byte)10, exec("return (byte)5+(byte)10;")); - assertEquals((byte)1+(byte)1+(byte)2, exec("return (byte)1+(byte)1+(byte)2;")); - assertEquals(((byte)1+(byte)1)+(byte)2, exec("return ((byte)1+(byte)1)+(byte)2;")); - assertEquals((byte)1+((byte)1+(byte)2), exec("return (byte)1+((byte)1+(byte)2);")); - assertEquals((byte)0+(byte)1, exec("return (byte)0+(byte)1;")); - assertEquals((byte)1+(byte)0, exec("return (byte)1+(byte)0;")); - assertEquals((byte)0+(byte)0, exec("return (byte)0+(byte)0;")); + assertEquals((byte) 1 + (byte) 1, exec("return (byte)1+(byte)1;")); + assertEquals((byte) 1 + (byte) 2, exec("return (byte)1+(byte)2;")); + assertEquals((byte) 5 + (byte) 10, exec("return (byte)5+(byte)10;")); + assertEquals((byte) 1 + (byte) 1 + (byte) 2, exec("return (byte)1+(byte)1+(byte)2;")); + assertEquals(((byte) 1 + (byte) 1) + (byte) 2, exec("return ((byte)1+(byte)1)+(byte)2;")); + assertEquals((byte) 1 + ((byte) 1 + (byte) 2), exec("return (byte)1+((byte)1+(byte)2);")); + assertEquals((byte) 0 + (byte) 1, exec("return (byte)0+(byte)1;")); + assertEquals((byte) 1 + (byte) 0, exec("return (byte)1+(byte)0;")); + assertEquals((byte) 0 + (byte) 0, exec("return (byte)0+(byte)0;")); } public void testChar() throws Exception { - assertEquals((char)1+(char)1, exec("char x = 1; char y = 1; return x+y;")); - assertEquals((char)1+(char)2, exec("char x = 1; char y = 2; return x+y;")); - assertEquals((char)5+(char)10, exec("char x = 5; char y = 10; return x+y;")); - assertEquals((char)1+(char)1+(char)2, exec("char x = 1; char y = 1; char z = 2; return x+y+z;")); - assertEquals(((char)1+(char)1)+(char)2, exec("char x = 1; char y = 1; char z = 2; return (x+y)+z;")); - assertEquals((char)1+((char)1+(char)2), exec("char x = 1; char y = 1; char z = 2; return x+(y+z);")); - assertEquals((char)0+(char)1, exec("char x = 0; char y = 1; return x+y;")); - assertEquals((char)1+(char)0, exec("char x = 1; char y = 0; return x+y;")); - assertEquals((char)0+(char)0, exec("char x = 0; char y = 0; return x+y;")); + assertEquals((char) 1 + (char) 1, exec("char x = 1; char y = 1; return x+y;")); + assertEquals((char) 1 + (char) 2, exec("char x = 1; char y = 2; return x+y;")); + assertEquals((char) 5 + (char) 10, exec("char x = 5; char y = 10; return x+y;")); + assertEquals((char) 1 + (char) 1 + (char) 2, exec("char x = 1; char y = 1; char z = 2; return x+y+z;")); + assertEquals(((char) 1 + (char) 1) + (char) 2, exec("char x = 1; char y = 1; char z = 2; return (x+y)+z;")); + assertEquals((char) 1 + ((char) 1 + (char) 2), exec("char x = 1; char y = 1; char z = 2; return x+(y+z);")); + assertEquals((char) 0 + (char) 1, exec("char x = 0; char y = 1; return x+y;")); + assertEquals((char) 1 + (char) 0, exec("char x = 1; char y = 0; return x+y;")); + 
assertEquals((char) 0 + (char) 0, exec("char x = 0; char y = 0; return x+y;")); } public void testCharConst() throws Exception { - assertEquals((char)1+(char)1, exec("return (char)1+(char)1;")); - assertEquals((char)1+(char)2, exec("return (char)1+(char)2;")); - assertEquals((char)5+(char)10, exec("return (char)5+(char)10;")); - assertEquals((char)1+(char)1+(char)2, exec("return (char)1+(char)1+(char)2;")); - assertEquals(((char)1+(char)1)+(char)2, exec("return ((char)1+(char)1)+(char)2;")); - assertEquals((char)1+((char)1+(char)2), exec("return (char)1+((char)1+(char)2);")); - assertEquals((char)0+(char)1, exec("return (char)0+(char)1;")); - assertEquals((char)1+(char)0, exec("return (char)1+(char)0;")); - assertEquals((char)0+(char)0, exec("return (char)0+(char)0;")); + assertEquals((char) 1 + (char) 1, exec("return (char)1+(char)1;")); + assertEquals((char) 1 + (char) 2, exec("return (char)1+(char)2;")); + assertEquals((char) 5 + (char) 10, exec("return (char)5+(char)10;")); + assertEquals((char) 1 + (char) 1 + (char) 2, exec("return (char)1+(char)1+(char)2;")); + assertEquals(((char) 1 + (char) 1) + (char) 2, exec("return ((char)1+(char)1)+(char)2;")); + assertEquals((char) 1 + ((char) 1 + (char) 2), exec("return (char)1+((char)1+(char)2);")); + assertEquals((char) 0 + (char) 1, exec("return (char)0+(char)1;")); + assertEquals((char) 1 + (char) 0, exec("return (char)1+(char)0;")); + assertEquals((char) 0 + (char) 0, exec("return (char)0+(char)0;")); } public void testShort() throws Exception { - assertEquals((short)1+(short)1, exec("short x = 1; short y = 1; return x+y;")); - assertEquals((short)1+(short)2, exec("short x = 1; short y = 2; return x+y;")); - assertEquals((short)5+(short)10, exec("short x = 5; short y = 10; return x+y;")); - assertEquals((short)1+(short)1+(short)2, exec("short x = 1; short y = 1; short z = 2; return x+y+z;")); - assertEquals(((short)1+(short)1)+(short)2, exec("short x = 1; short y = 1; short z = 2; return (x+y)+z;")); - assertEquals((short)1+((short)1+(short)2), exec("short x = 1; short y = 1; short z = 2; return x+(y+z);")); - assertEquals((short)0+(short)1, exec("short x = 0; short y = 1; return x+y;")); - assertEquals((short)1+(short)0, exec("short x = 1; short y = 0; return x+y;")); - assertEquals((short)0+(short)0, exec("short x = 0; short y = 0; return x+y;")); + assertEquals((short) 1 + (short) 1, exec("short x = 1; short y = 1; return x+y;")); + assertEquals((short) 1 + (short) 2, exec("short x = 1; short y = 2; return x+y;")); + assertEquals((short) 5 + (short) 10, exec("short x = 5; short y = 10; return x+y;")); + assertEquals((short) 1 + (short) 1 + (short) 2, exec("short x = 1; short y = 1; short z = 2; return x+y+z;")); + assertEquals(((short) 1 + (short) 1) + (short) 2, exec("short x = 1; short y = 1; short z = 2; return (x+y)+z;")); + assertEquals((short) 1 + ((short) 1 + (short) 2), exec("short x = 1; short y = 1; short z = 2; return x+(y+z);")); + assertEquals((short) 0 + (short) 1, exec("short x = 0; short y = 1; return x+y;")); + assertEquals((short) 1 + (short) 0, exec("short x = 1; short y = 0; return x+y;")); + assertEquals((short) 0 + (short) 0, exec("short x = 0; short y = 0; return x+y;")); } public void testShortConst() throws Exception { - assertEquals((short)1+(short)1, exec("return (short)1+(short)1;")); - assertEquals((short)1+(short)2, exec("return (short)1+(short)2;")); - assertEquals((short)5+(short)10, exec("return (short)5+(short)10;")); - assertEquals((short)1+(short)1+(short)2, exec("return 
(short)1+(short)1+(short)2;")); - assertEquals(((short)1+(short)1)+(short)2, exec("return ((short)1+(short)1)+(short)2;")); - assertEquals((short)1+((short)1+(short)2), exec("return (short)1+((short)1+(short)2);")); - assertEquals((short)0+(short)1, exec("return (short)0+(short)1;")); - assertEquals((short)1+(short)0, exec("return (short)1+(short)0;")); - assertEquals((short)0+(short)0, exec("return (short)0+(short)0;")); + assertEquals((short) 1 + (short) 1, exec("return (short)1+(short)1;")); + assertEquals((short) 1 + (short) 2, exec("return (short)1+(short)2;")); + assertEquals((short) 5 + (short) 10, exec("return (short)5+(short)10;")); + assertEquals((short) 1 + (short) 1 + (short) 2, exec("return (short)1+(short)1+(short)2;")); + assertEquals(((short) 1 + (short) 1) + (short) 2, exec("return ((short)1+(short)1)+(short)2;")); + assertEquals((short) 1 + ((short) 1 + (short) 2), exec("return (short)1+((short)1+(short)2);")); + assertEquals((short) 0 + (short) 1, exec("return (short)0+(short)1;")); + assertEquals((short) 1 + (short) 0, exec("return (short)1+(short)0;")); + assertEquals((short) 0 + (short) 0, exec("return (short)0+(short)0;")); } public void testLong() throws Exception { - assertEquals(1L+1L, exec("long x = 1; long y = 1; return x+y;")); - assertEquals(1L+2L, exec("long x = 1; long y = 2; return x+y;")); - assertEquals(5L+10L, exec("long x = 5; long y = 10; return x+y;")); - assertEquals(1L+1L+2L, exec("long x = 1; long y = 1; long z = 2; return x+y+z;")); - assertEquals((1L+1L)+2L, exec("long x = 1; long y = 1; long z = 2; return (x+y)+z;")); - assertEquals(1L+(1L+2L), exec("long x = 1; long y = 1; long z = 2; return x+(y+z);")); - assertEquals(0L+1L, exec("long x = 0; long y = 1; return x+y;")); - assertEquals(1L+0L, exec("long x = 1; long y = 0; return x+y;")); - assertEquals(0L+0L, exec("long x = 0; long y = 0; return x+y;")); + assertEquals(1L + 1L, exec("long x = 1; long y = 1; return x+y;")); + assertEquals(1L + 2L, exec("long x = 1; long y = 2; return x+y;")); + assertEquals(5L + 10L, exec("long x = 5; long y = 10; return x+y;")); + assertEquals(1L + 1L + 2L, exec("long x = 1; long y = 1; long z = 2; return x+y+z;")); + assertEquals((1L + 1L) + 2L, exec("long x = 1; long y = 1; long z = 2; return (x+y)+z;")); + assertEquals(1L + (1L + 2L), exec("long x = 1; long y = 1; long z = 2; return x+(y+z);")); + assertEquals(0L + 1L, exec("long x = 0; long y = 1; return x+y;")); + assertEquals(1L + 0L, exec("long x = 1; long y = 0; return x+y;")); + assertEquals(0L + 0L, exec("long x = 0; long y = 0; return x+y;")); } public void testLongConst() throws Exception { - assertEquals(1L+1L, exec("return 1L+1L;")); - assertEquals(1L+2L, exec("return 1L+2L;")); - assertEquals(5L+10L, exec("return 5L+10L;")); - assertEquals(1L+1L+2L, exec("return 1L+1L+2L;")); - assertEquals((1L+1L)+2L, exec("return (1L+1L)+2L;")); - assertEquals(1L+(1L+2L), exec("return 1L+(1L+2L);")); - assertEquals(0L+1L, exec("return 0L+1L;")); - assertEquals(1L+0L, exec("return 1L+0L;")); - assertEquals(0L+0L, exec("return 0L+0L;")); + assertEquals(1L + 1L, exec("return 1L+1L;")); + assertEquals(1L + 2L, exec("return 1L+2L;")); + assertEquals(5L + 10L, exec("return 5L+10L;")); + assertEquals(1L + 1L + 2L, exec("return 1L+1L+2L;")); + assertEquals((1L + 1L) + 2L, exec("return (1L+1L)+2L;")); + assertEquals(1L + (1L + 2L), exec("return 1L+(1L+2L);")); + assertEquals(0L + 1L, exec("return 0L+1L;")); + assertEquals(1L + 0L, exec("return 1L+0L;")); + assertEquals(0L + 0L, exec("return 0L+0L;")); } public void 
testFloat() throws Exception { - assertEquals(1F+1F, exec("float x = 1F; float y = 1F; return x+y;")); - assertEquals(1F+2F, exec("float x = 1F; float y = 2F; return x+y;")); - assertEquals(5F+10F, exec("float x = 5F; float y = 10F; return x+y;")); - assertEquals(1F+1F+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+y+z;")); - assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return (x+y)+z;")); - assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+(y+z);")); - assertEquals(0F+1F, exec("float x = 0F; float y = 1F; return x+y;")); - assertEquals(1F+0F, exec("float x = 1F; float y = 0F; return x+y;")); - assertEquals(0F+0F, exec("float x = 0F; float y = 0F; return x+y;")); + assertEquals(1F + 1F, exec("float x = 1F; float y = 1F; return x+y;")); + assertEquals(1F + 2F, exec("float x = 1F; float y = 2F; return x+y;")); + assertEquals(5F + 10F, exec("float x = 5F; float y = 10F; return x+y;")); + assertEquals(1F + 1F + 2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+y+z;")); + assertEquals((1F + 1F) + 2F, exec("float x = 1F; float y = 1F; float z = 2F; return (x+y)+z;")); + assertEquals((1F + 1F) + 2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+(y+z);")); + assertEquals(0F + 1F, exec("float x = 0F; float y = 1F; return x+y;")); + assertEquals(1F + 0F, exec("float x = 1F; float y = 0F; return x+y;")); + assertEquals(0F + 0F, exec("float x = 0F; float y = 0F; return x+y;")); } public void testFloatConst() throws Exception { - assertEquals(1F+1F, exec("return 1F+1F;")); - assertEquals(1F+2F, exec("return 1F+2F;")); - assertEquals(5F+10F, exec("return 5F+10F;")); - assertEquals(1F+1F+2F, exec("return 1F+1F+2F;")); - assertEquals((1F+1F)+2F, exec("return (1F+1F)+2F;")); - assertEquals(1F+(1F+2F), exec("return 1F+(1F+2F);")); - assertEquals(0F+1F, exec("return 0F+1F;")); - assertEquals(1F+0F, exec("return 1F+0F;")); - assertEquals(0F+0F, exec("return 0F+0F;")); + assertEquals(1F + 1F, exec("return 1F+1F;")); + assertEquals(1F + 2F, exec("return 1F+2F;")); + assertEquals(5F + 10F, exec("return 5F+10F;")); + assertEquals(1F + 1F + 2F, exec("return 1F+1F+2F;")); + assertEquals((1F + 1F) + 2F, exec("return (1F+1F)+2F;")); + assertEquals(1F + (1F + 2F), exec("return 1F+(1F+2F);")); + assertEquals(0F + 1F, exec("return 0F+1F;")); + assertEquals(1F + 0F, exec("return 1F+0F;")); + assertEquals(0F + 0F, exec("return 0F+0F;")); } public void testDouble() throws Exception { - assertEquals(1.0+1.0, exec("double x = 1.0; double y = 1.0; return x+y;")); - assertEquals(1.0+2.0, exec("double x = 1.0; double y = 2.0; return x+y;")); - assertEquals(5.0+10.0, exec("double x = 5.0; double y = 10.0; return x+y;")); - assertEquals(1.0+1.0+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+y+z;")); - assertEquals((1.0+1.0)+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return (x+y)+z;")); - assertEquals(1.0+(1.0+2.0), exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+(y+z);")); - assertEquals(0.0+1.0, exec("double x = 0.0; double y = 1.0; return x+y;")); - assertEquals(1.0+0.0, exec("double x = 1.0; double y = 0.0; return x+y;")); - assertEquals(0.0+0.0, exec("double x = 0.0; double y = 0.0; return x+y;")); + assertEquals(1.0 + 1.0, exec("double x = 1.0; double y = 1.0; return x+y;")); + assertEquals(1.0 + 2.0, exec("double x = 1.0; double y = 2.0; return x+y;")); + assertEquals(5.0 + 10.0, exec("double x = 5.0; double y = 10.0; return x+y;")); + assertEquals(1.0 + 1.0 + 2.0, exec("double 
x = 1.0; double y = 1.0; double z = 2.0; return x+y+z;")); + assertEquals((1.0 + 1.0) + 2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return (x+y)+z;")); + assertEquals(1.0 + (1.0 + 2.0), exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+(y+z);")); + assertEquals(0.0 + 1.0, exec("double x = 0.0; double y = 1.0; return x+y;")); + assertEquals(1.0 + 0.0, exec("double x = 1.0; double y = 0.0; return x+y;")); + assertEquals(0.0 + 0.0, exec("double x = 0.0; double y = 0.0; return x+y;")); } public void testDoubleConst() throws Exception { - assertEquals(1.0+1.0, exec("return 1.0+1.0;")); - assertEquals(1.0+2.0, exec("return 1.0+2.0;")); - assertEquals(5.0+10.0, exec("return 5.0+10.0;")); - assertEquals(1.0+1.0+2.0, exec("return 1.0+1.0+2.0;")); - assertEquals((1.0+1.0)+2.0, exec("return (1.0+1.0)+2.0;")); - assertEquals(1.0+(1.0+2.0), exec("return 1.0+(1.0+2.0);")); - assertEquals(0.0+1.0, exec("return 0.0+1.0;")); - assertEquals(1.0+0.0, exec("return 1.0+0.0;")); - assertEquals(0.0+0.0, exec("return 0.0+0.0;")); + assertEquals(1.0 + 1.0, exec("return 1.0+1.0;")); + assertEquals(1.0 + 2.0, exec("return 1.0+2.0;")); + assertEquals(5.0 + 10.0, exec("return 5.0+10.0;")); + assertEquals(1.0 + 1.0 + 2.0, exec("return 1.0+1.0+2.0;")); + assertEquals((1.0 + 1.0) + 2.0, exec("return (1.0+1.0)+2.0;")); + assertEquals(1.0 + (1.0 + 2.0), exec("return 1.0+(1.0+2.0);")); + assertEquals(0.0 + 1.0, exec("return 0.0+1.0;")); + assertEquals(1.0 + 0.0, exec("return 1.0+0.0;")); + assertEquals(0.0 + 0.0, exec("return 0.0+0.0;")); } public void testDef() { @@ -384,15 +384,9 @@ public class AdditionTests extends ScriptTestCase { } public void testDefNulls() { - expectScriptThrows(NullPointerException.class, () -> { - exec("def x = null; int y = 1; return x + y"); - }); - expectScriptThrows(NullPointerException.class, () -> { - exec("int x = 1; def y = null; return x + y"); - }); - expectScriptThrows(NullPointerException.class, () -> { - exec("def x = null; def y = 1; return x + y"); - }); + expectScriptThrows(NullPointerException.class, () -> { exec("def x = null; int y = 1; return x + y"); }); + expectScriptThrows(NullPointerException.class, () -> { exec("int x = 1; def y = null; return x + y"); }); + expectScriptThrows(NullPointerException.class, () -> { exec("def x = null; def y = 1; return x + y"); }); } public void testCompoundAssignment() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AnalyzerCasterTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AnalyzerCasterTests.java index 47b03fd6453..6efc78579d8 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AnalyzerCasterTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AnalyzerCasterTests.java @@ -52,8 +52,10 @@ public class AnalyzerCasterTests extends OpenSearchTestCase { assertEquals(expected, cast.targetType); if (mustBeExplicit) { - ClassCastException error = expectThrows(ClassCastException.class, - () -> AnalyzerCaster.getLegalCast(location, actual, expected, false, false)); + ClassCastException error = expectThrows( + ClassCastException.class, + () -> AnalyzerCaster.getLegalCast(location, actual, expected, false, false) + ); assertTrue(error.getMessage().startsWith("Cannot cast")); } else { cast = AnalyzerCaster.getLegalCast(location, actual, expected, false, false); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AndTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AndTests.java index 
926188e2324..df554bec4a6 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AndTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AndTests.java @@ -67,21 +67,13 @@ public class AndTests extends ScriptTestCase { } public void testIllegal() throws Exception { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; int y = 1; return x & y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = (double)4; int y = 1; return x & y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; int y = 1; return x & y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; int y = 1; return x & y"); }); } public void testDef() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; def y = (byte)1; return x & y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; def y = (byte)1; return x & y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; def y = (byte)1; return x & y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; def y = (byte)1; return x & y"); }); assertEquals(0, exec("def x = (byte)4; def y = (byte)1; return x & y")); assertEquals(0, exec("def x = (short)4; def y = (byte)1; return x & y")); assertEquals(0, exec("def x = (char)4; def y = (byte)1; return x & y")); @@ -118,19 +110,15 @@ public class AndTests extends ScriptTestCase { assertEquals(0, exec("def x = (int)4; def y = (int)1; return x & y")); assertEquals(0L, exec("def x = (long)4; def y = (long)1; return x & y")); - assertEquals(true, exec("def x = true; def y = true; return x & y")); + assertEquals(true, exec("def x = true; def y = true; return x & y")); assertEquals(false, exec("def x = true; def y = false; return x & y")); assertEquals(false, exec("def x = false; def y = true; return x & y")); assertEquals(false, exec("def x = false; def y = false; return x & y")); } public void testDefTypedLHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; def y = (byte)1; return x & y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = (double)4; def y = (byte)1; return x & y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; def y = (byte)1; return x & y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; def y = (byte)1; return x & y"); }); assertEquals(0, exec("byte x = (byte)4; def y = (byte)1; return x & y")); assertEquals(0, exec("short x = (short)4; def y = (byte)1; return x & y")); assertEquals(0, exec("char x = (char)4; def y = (byte)1; return x & y")); @@ -167,19 +155,15 @@ public class AndTests extends ScriptTestCase { assertEquals(0, exec("int x = (int)4; def y = (int)1; return x & y")); assertEquals(0L, exec("long x = (long)4; def y = (long)1; return x & y")); - assertEquals(true, exec("boolean x = true; def y = true; return x & y")); + assertEquals(true, exec("boolean x = true; def y = true; return x & y")); assertEquals(false, exec("boolean x = true; def y = false; return x & y")); assertEquals(false, exec("boolean x = false; def y = true; return x & y")); assertEquals(false, exec("boolean x = false; def y = false; return x & y")); } public void testDefTypedRHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; byte y = (byte)1; return x & y"); - }); 
- expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; byte y = (byte)1; return x & y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; byte y = (byte)1; return x & y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; byte y = (byte)1; return x & y"); }); assertEquals(0, exec("def x = (byte)4; byte y = (byte)1; return x & y")); assertEquals(0, exec("def x = (short)4; byte y = (byte)1; return x & y")); assertEquals(0, exec("def x = (char)4; byte y = (byte)1; return x & y")); @@ -216,7 +200,7 @@ public class AndTests extends ScriptTestCase { assertEquals(0, exec("def x = (int)4; int y = (int)1; return x & y")); assertEquals(0L, exec("def x = (long)4; long y = (long)1; return x & y")); - assertEquals(true, exec("def x = true; boolean y = true; return x & y")); + assertEquals(true, exec("def x = true; boolean y = true; return x & y")); assertEquals(false, exec("def x = true; boolean y = false; return x & y")); assertEquals(false, exec("def x = false; boolean y = true; return x & y")); assertEquals(false, exec("def x = false; boolean y = false; return x & y")); @@ -246,18 +230,10 @@ public class AndTests extends ScriptTestCase { } public void testBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = 4; int y = 1; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = 4; int y = 1; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; float y = 1; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; double y = 1; x &= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 4; int y = 1; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 4; int y = 1; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; float y = 1; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; double y = 1; x &= y"); }); } public void testDefCompoundAssignment() { @@ -284,17 +260,9 @@ public class AndTests extends ScriptTestCase { } public void testDefBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4F; int y = 1; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4D; int y = 1; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; def y = 1F; x &= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; def y = 1D; x &= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4F; int y = 1; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4D; int y = 1; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = 1F; x &= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = 1D; x &= y"); }); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayLikeObjectTestCase.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayLikeObjectTestCase.java index 7ef28f36148..e3c66c134cb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayLikeObjectTestCase.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayLikeObjectTestCase.java @@ -46,11 +46,13 @@ public abstract class ArrayLikeObjectTestCase 
extends ScriptTestCase { * lists. */ protected abstract String declType(String valueType); + /** * Build the string for calling the constructor for the array-like-object to test. So {@code new int[5]} for arrays and * {@code [0, 0, 0, 0, 0]} or {@code [null, null, null, null, null]} for lists. */ protected abstract String valueCtorCall(String valueType, int size); + /** * Matcher for the message of the out of bounds exceptions thrown for too negative or too positive offsets. */ @@ -65,14 +67,14 @@ public abstract class ArrayLikeObjectTestCase extends ScriptTestCase { assertEquals(val, exec(decl + "; x[ 0] = params.val; return x[-5];", singletonMap("val", val), true)); assertEquals(val, exec(decl + "; x[-5] = params.val; return x[-5];", singletonMap("val", val), true)); - expectOutOfBounds( 6, decl + "; return x[ 6]", val); + expectOutOfBounds(6, decl + "; return x[ 6]", val); expectOutOfBounds(-1, decl + "; return x[-6]", val); - expectOutOfBounds( 6, decl + "; x[ 6] = params.val; return 0", val); + expectOutOfBounds(6, decl + "; x[ 6] = params.val; return 0", val); expectOutOfBounds(-1, decl + "; x[-6] = params.val; return 0", val); if (valPlusOne != null) { - assertEquals(val, exec(decl + "; x[0] = params.val; x[ 0] = x[ 0]++; return x[0];", singletonMap("val", val), true)); - assertEquals(val, exec(decl + "; x[0] = params.val; x[ 0] = x[-5]++; return x[0];", singletonMap("val", val), true)); + assertEquals(val, exec(decl + "; x[0] = params.val; x[ 0] = x[ 0]++; return x[0];", singletonMap("val", val), true)); + assertEquals(val, exec(decl + "; x[0] = params.val; x[ 0] = x[-5]++; return x[0];", singletonMap("val", val), true)); assertEquals(valPlusOne, exec(decl + "; x[0] = params.val; x[ 0] = ++x[ 0]; return x[0];", singletonMap("val", val), true)); assertEquals(valPlusOne, exec(decl + "; x[0] = params.val; x[ 0] = ++x[-5]; return x[0];", singletonMap("val", val), true)); assertEquals(valPlusOne, exec(decl + "; x[0] = params.val; x[ 0]++ ; return x[0];", singletonMap("val", val), true)); @@ -80,18 +82,20 @@ public abstract class ArrayLikeObjectTestCase extends ScriptTestCase { assertEquals(valPlusOne, exec(decl + "; x[0] = params.val; x[ 0] += 1 ; return x[0];", singletonMap("val", val), true)); assertEquals(valPlusOne, exec(decl + "; x[0] = params.val; x[-5] += 1 ; return x[0];", singletonMap("val", val), true)); - expectOutOfBounds( 6, decl + "; return x[ 6]++", val); + expectOutOfBounds(6, decl + "; return x[ 6]++", val); expectOutOfBounds(-1, decl + "; return x[-6]++", val); - expectOutOfBounds( 6, decl + "; return ++x[ 6]", val); + expectOutOfBounds(6, decl + "; return ++x[ 6]", val); expectOutOfBounds(-1, decl + "; return ++x[-6]", val); - expectOutOfBounds( 6, decl + "; x[ 6] += 1; return 0", val); + expectOutOfBounds(6, decl + "; x[ 6] += 1; return 0", val); expectOutOfBounds(-1, decl + "; x[-6] += 1; return 0", val); } } private void expectOutOfBounds(int index, String script, Object val) { - IndexOutOfBoundsException e = expectScriptThrows(IndexOutOfBoundsException.class, () -> - exec(script, singletonMap("val", val), true)); + IndexOutOfBoundsException e = expectScriptThrows( + IndexOutOfBoundsException.class, + () -> exec(script, singletonMap("val", val), true) + ); try { /* If this fails you *might* be missing -XX:-OmitStackTraceInFastThrow in the test jvm * In Eclipse you can add this by default by going to Preference->Java->Installed JREs, @@ -106,19 +110,63 @@ public abstract class ArrayLikeObjectTestCase extends ScriptTestCase { } } - public void testInts() { 
arrayLoadStoreTestCase(false, "int", 5, 6); } - public void testIntsInDef() { arrayLoadStoreTestCase(true, "int", 5, 6); } - public void testLongs() { arrayLoadStoreTestCase(false, "long", 5L, 6L); } - public void testLongsInDef() { arrayLoadStoreTestCase(true, "long", 5L, 6L); } - public void testShorts() { arrayLoadStoreTestCase(false, "short", (short) 5, (short) 6); } - public void testShortsInDef() { arrayLoadStoreTestCase(true, "short", (short) 5, (short) 6); } - public void testBytes() { arrayLoadStoreTestCase(false, "byte", (byte) 5, (byte) 6); } - public void testBytesInDef() { arrayLoadStoreTestCase(true, "byte", (byte) 5, (byte) 6); } - public void testFloats() { arrayLoadStoreTestCase(false, "float", 5.0f, 6.0f); } - public void testFloatsInDef() { arrayLoadStoreTestCase(true, "float", 5.0f, 6.0f); } - public void testDoubles() { arrayLoadStoreTestCase(false, "double", 5.0d, 6.0d); } - public void testDoublesInDef() { arrayLoadStoreTestCase(true, "double", 5.0d, 6.0d); } - public void testStrings() { arrayLoadStoreTestCase(false, "String", "cat", null); } - public void testStringsInDef() { arrayLoadStoreTestCase(true, "String", "cat", null); } - public void testDef() { arrayLoadStoreTestCase(true, "def", 5, null); } + public void testInts() { + arrayLoadStoreTestCase(false, "int", 5, 6); + } + + public void testIntsInDef() { + arrayLoadStoreTestCase(true, "int", 5, 6); + } + + public void testLongs() { + arrayLoadStoreTestCase(false, "long", 5L, 6L); + } + + public void testLongsInDef() { + arrayLoadStoreTestCase(true, "long", 5L, 6L); + } + + public void testShorts() { + arrayLoadStoreTestCase(false, "short", (short) 5, (short) 6); + } + + public void testShortsInDef() { + arrayLoadStoreTestCase(true, "short", (short) 5, (short) 6); + } + + public void testBytes() { + arrayLoadStoreTestCase(false, "byte", (byte) 5, (byte) 6); + } + + public void testBytesInDef() { + arrayLoadStoreTestCase(true, "byte", (byte) 5, (byte) 6); + } + + public void testFloats() { + arrayLoadStoreTestCase(false, "float", 5.0f, 6.0f); + } + + public void testFloatsInDef() { + arrayLoadStoreTestCase(true, "float", 5.0f, 6.0f); + } + + public void testDoubles() { + arrayLoadStoreTestCase(false, "double", 5.0d, 6.0d); + } + + public void testDoublesInDef() { + arrayLoadStoreTestCase(true, "double", 5.0d, 6.0d); + } + + public void testStrings() { + arrayLoadStoreTestCase(false, "String", "cat", null); + } + + public void testStringsInDef() { + arrayLoadStoreTestCase(true, "String", "cat", null); + } + + public void testDef() { + arrayLoadStoreTestCase(true, "def", 5, null); + } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java index f0eae8e2f6f..7563ab87fd5 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java @@ -57,7 +57,7 @@ public class ArrayTests extends ArrayLikeObjectTestCase { protected Matcher outOfBoundsExceptionMessageMatcher(int index, int size) { if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { return equalTo(Integer.toString(index)); - } else{ + } else { return equalTo("Index " + Integer.toString(index) + " out of bounds for length " + Integer.toString(size)); } } @@ -79,8 +79,7 @@ public class ArrayTests extends ArrayLikeObjectTestCase { private void assertArrayLength(int length, Object array) throws Throwable { final MethodHandle mh = 
Def.arrayLengthGetter(array.getClass()); assertSame(array.getClass(), mh.type().parameterType(0)); - assertEquals(length, (int) mh.asType(MethodType.methodType(int.class, Object.class)) - .invokeExact(array)); + assertEquals(length, (int) mh.asType(MethodType.methodType(int.class, Object.class)).invokeExact(array)); } public void testJacksCrazyExpression1() { @@ -96,8 +95,13 @@ public class ArrayTests extends ArrayLikeObjectTestCase { } public void testForLoop() { - assertEquals(999*1000/2, exec("def a = new int[1000]; for (int x = 0; x < a.length; x++) { a[x] = x; } "+ - "int total = 0; for (int x = 0; x < a.length; x++) { total += a[x]; } return total;")); + assertEquals( + 999 * 1000 / 2, + exec( + "def a = new int[1000]; for (int x = 0; x < a.length; x++) { a[x] = x; } " + + "int total = 0; for (int x = 0; x < a.length; x++) { total += a[x]; } return total;" + ) + ); } /** diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java index 1c54f1220a4..d5cd3205b31 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/AugmentationTests.java @@ -71,13 +71,14 @@ public class AugmentationTests extends ScriptTestCase { public abstract static class DigestTestScript { public static final String[] PARAMETERS = {}; + public abstract String execute(); + public interface Factory { DigestTestScript newInstance(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", DigestTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("test", DigestTestScript.Factory.class); } public void testStatic() { @@ -96,42 +97,39 @@ public class AugmentationTests extends ScriptTestCase { } public void testCapturingReference() { - assertEquals(1, exec("int foo(Supplier t) { return t.get() }" + - "ArrayList l = new ArrayList(); l.add(1);" + - "return foo(l::getLength);")); - assertEquals(1, exec("int foo(Supplier t) { return t.get() }" + - "List l = new ArrayList(); l.add(1);" + - "return foo(l::getLength);")); - assertEquals(1, exec("int foo(Supplier t) { return t.get() }" + - "def l = new ArrayList(); l.add(1);" + - "return foo(l::getLength);")); + assertEquals( + 1, + exec("int foo(Supplier t) { return t.get() }" + "ArrayList l = new ArrayList(); l.add(1);" + "return foo(l::getLength);") + ); + assertEquals( + 1, + exec("int foo(Supplier t) { return t.get() }" + "List l = new ArrayList(); l.add(1);" + "return foo(l::getLength);") + ); + assertEquals( + 1, + exec("int foo(Supplier t) { return t.get() }" + "def l = new ArrayList(); l.add(1);" + "return foo(l::getLength);") + ); } public void testIterable_Any() { - assertEquals(true, - exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)")); + assertEquals(true, exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)")); } public void testIterable_AsCollection() { - assertEquals(true, - exec("List l = new ArrayList(); return l.asCollection() === l")); + assertEquals(true, exec("List l = new ArrayList(); return l.asCollection() === l")); } public void testIterable_AsList() { - assertEquals(true, - exec("List l = new ArrayList(); return l.asList() === l")); - assertEquals(5, - exec("Set l = new HashSet(); l.add(5); return l.asList()[0]")); + assertEquals(true, exec("List l = new ArrayList(); return l.asList() === l")); + assertEquals(5, exec("Set l = new HashSet(); 
l.add(5); return l.asList()[0]")); } public void testIterable_Each() { - assertEquals(1, - exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()")); + assertEquals(1, exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()")); } public void testIterable_EachWithIndex() { - assertEquals(0, - exec("List l = new ArrayList(); l.add(2); Map m = new HashMap(); l.eachWithIndex(m::put); return m.get(2)")); + assertEquals(0, exec("List l = new ArrayList(); l.add(2); Map m = new HashMap(); l.eachWithIndex(m::put); return m.get(2)")); } public void testIterable_Every() { @@ -139,119 +137,138 @@ public class AugmentationTests extends ScriptTestCase { } public void testIterable_FindResults() { - assertEquals(1, - exec("List l = new ArrayList(); l.add(1); l.add(2); l.findResults(x -> x == 1 ? x : null).size()")); + assertEquals(1, exec("List l = new ArrayList(); l.add(1); l.add(2); l.findResults(x -> x == 1 ? x : null).size()")); } public void testIterable_GroupBy() { - assertEquals(2, - exec("List l = new ArrayList(); l.add(1); l.add(-1); l.groupBy(x -> x < 0 ? 'negative' : 'positive').size()")); + assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(-1); l.groupBy(x -> x < 0 ? 'negative' : 'positive').size()")); } public void testIterable_Join() { - assertEquals("test,ing", - exec("List l = new ArrayList(); l.add('test'); l.add('ing'); l.join(',')")); + assertEquals("test,ing", exec("List l = new ArrayList(); l.add('test'); l.add('ing'); l.join(',')")); } public void testIterable_Sum() { assertEquals(3.0D, exec("def l = [1,2]; return l.sum()")); - assertEquals(5.0D, - exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)")); + assertEquals(5.0D, exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)")); } public void testCollection_Collect() { - assertEquals(Arrays.asList(2, 3), - exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(x -> x + 1)")); - assertEquals(asSet(2, 3), - exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(new HashSet(), x -> x + 1)")); + assertEquals(Arrays.asList(2, 3), exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(x -> x + 1)")); + assertEquals(asSet(2, 3), exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(new HashSet(), x -> x + 1)")); } public void testCollection_Find() { - assertEquals(2, - exec("List l = new ArrayList(); l.add(1); l.add(2); return l.find(x -> x == 2)")); + assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(2); return l.find(x -> x == 2)")); } public void testCollection_FindAll() { - assertEquals(Arrays.asList(2), - exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findAll(x -> x == 2)")); + assertEquals(Arrays.asList(2), exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findAll(x -> x == 2)")); } public void testCollection_FindResult() { - assertEquals("found", - exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult(x -> x > 1 ? 'found' : null)")); - assertEquals("notfound", - exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult('notfound', x -> x > 10 ? 'found' : null)")); + assertEquals("found", exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult(x -> x > 1 ? 'found' : null)")); + assertEquals( + "notfound", + exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult('notfound', x -> x > 10 ? 
'found' : null)") + ); } public void testCollection_Split() { - assertEquals(Arrays.asList(Arrays.asList(2), Arrays.asList(1)), - exec("List l = new ArrayList(); l.add(1); l.add(2); return l.split(x -> x == 2)")); + assertEquals( + Arrays.asList(Arrays.asList(2), Arrays.asList(1)), + exec("List l = new ArrayList(); l.add(1); l.add(2); return l.split(x -> x == 2)") + ); } public void testMap_Collect() { - assertEquals(Arrays.asList("one1", "two2"), - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect((key,value) -> key + value)")); - assertEquals(asSet("one1", "two2"), - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect(new HashSet(), (key,value) -> key + value)")); + assertEquals( + Arrays.asList("one1", "two2"), + exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect((key,value) -> key + value)") + ); + assertEquals( + asSet("one1", "two2"), + exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect(new HashSet(), (key,value) -> key + value)") + ); } public void testMap_Count() { - assertEquals(1, - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.count((key,value) -> value == 2)")); + assertEquals(1, exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.count((key,value) -> value == 2)")); } public void testMap_Each() { - assertEquals(2, - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; Map m2 = new TreeMap(); m.each(m2::put); return m2.size()")); + assertEquals(2, exec("Map m = new TreeMap(); m.one = 1; m.two = 2; Map m2 = new TreeMap(); m.each(m2::put); return m2.size()")); } public void testMap_Every() { - assertEquals(false, - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.every((key,value) -> value == 2)")); + assertEquals(false, exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.every((key,value) -> value == 2)")); } public void testMap_Find() { - assertEquals("two", - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.find((key,value) -> value == 2).key")); + assertEquals("two", exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.find((key,value) -> value == 2).key")); } public void testMap_FindAll() { - assertEquals(Collections.singletonMap("two", 2), - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findAll((key,value) -> value == 2)")); + assertEquals( + Collections.singletonMap("two", 2), + exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findAll((key,value) -> value == 2)") + ); } public void testMap_FindResult() { - assertEquals("found", - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findResult((key,value) -> value == 2 ? 'found' : null)")); - assertEquals("notfound", - exec("Map m = new TreeMap(); m.one = 1; m.two = 2; " + - "return m.findResult('notfound', (key,value) -> value == 10 ? 'found' : null)")); + assertEquals( + "found", + exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findResult((key,value) -> value == 2 ? 'found' : null)") + ); + assertEquals( + "notfound", + exec( + "Map m = new TreeMap(); m.one = 1; m.two = 2; " + + "return m.findResult('notfound', (key,value) -> value == 10 ? 'found' : null)" + ) + ); } public void testMap_FindResults() { - assertEquals(Arrays.asList("negative", "positive"), - exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + - "return m.findResults((key,value) -> value < 0 ? 'negative' : 'positive')")); + assertEquals( + Arrays.asList("negative", "positive"), + exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + "return m.findResults((key,value) -> value < 0 ? 
'negative' : 'positive')") + ); } public void testMap_GroupBy() { - Map> expected = new HashMap<>(); + Map> expected = new HashMap<>(); expected.put("negative", Collections.singletonMap("a", -1)); expected.put("positive", Collections.singletonMap("b", 1)); - assertEquals(expected, - exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + - "return m.groupBy((key,value) -> value < 0 ? 'negative' : 'positive')")); + assertEquals( + expected, + exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " + "return m.groupBy((key,value) -> value < 0 ? 'negative' : 'positive')") + ); } public void testFeatureTest() { - assertEquals(5, exec("org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + - " ft.setX(3); ft.setY(2); return ft.getTotal()")); - assertEquals(5, exec("def ft = new org.opensearch.painless.FeatureTestObject();" + - " ft.setX(3); ft.setY(2); return ft.getTotal()")); - assertEquals(8, exec("org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + - " ft.setX(3); ft.setY(2); return ft.addToTotal(3)")); - assertEquals(8, exec("def ft = new org.opensearch.painless.FeatureTestObject();" + - " ft.setX(3); ft.setY(2); return ft.addToTotal(3)")); + assertEquals( + 5, + exec( + "org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + + " ft.setX(3); ft.setY(2); return ft.getTotal()" + ) + ); + assertEquals( + 5, + exec("def ft = new org.opensearch.painless.FeatureTestObject();" + " ft.setX(3); ft.setY(2); return ft.getTotal()") + ); + assertEquals( + 8, + exec( + "org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + + " ft.setX(3); ft.setY(2); return ft.addToTotal(3)" + ) + ); + assertEquals( + 8, + exec("def ft = new org.opensearch.painless.FeatureTestObject();" + " ft.setX(3); ft.setY(2); return ft.addToTotal(3)") + ); } private static class SplitCase { @@ -264,10 +281,12 @@ public class AugmentationTests extends ScriptTestCase { this.token = token; this.count = count; } + SplitCase(String input, String token) { this(input, token, -1); } } + public void testString_SplitOnToken() { SplitCase[] cases = new SplitCase[] { new SplitCase("", ""), @@ -282,22 +301,17 @@ public class AugmentationTests extends ScriptTestCase { new SplitCase("aaaaaaa", "a", 2), new SplitCase("1.1.1.1.111", "1"), new SplitCase("1.1.1.1.111", "."), - new SplitCase("1\n1.1.\r\n1\r\n111", "\r\n"), - }; + new SplitCase("1\n1.1.\r\n1\r\n111", "\r\n"), }; for (SplitCase split : cases) { assertArrayEquals( split.input.split(Pattern.quote(split.token), split.count), - (String[])exec("return \""+split.input+"\".splitOnToken(\""+split.token+"\", "+split.count+");") + (String[]) exec("return \"" + split.input + "\".splitOnToken(\"" + split.token + "\", " + split.count + ");") ); } } public String execDigest(String script) { - return getEngine().compile( - "digest_test", - script, - DigestTestScript.CONTEXT, Collections.emptyMap() - ).newInstance().execute(); + return getEngine().compile("digest_test", script, DigestTestScript.CONTEXT, Collections.emptyMap()).newInstance().execute(); } public void testSha1() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BaseClassTests.java index 915214b82df..6892974b6ca 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BaseClassTests.java +++ 
b/modules/lang-painless/src/test/java/org/opensearch/painless/BaseClassTests.java @@ -110,33 +110,44 @@ public class BaseClassTests extends ScriptTestCase { } public static final String[] PARAMETERS = new String[] {}; + public abstract Object execute(); public String getTestString() { return testString; } + public int getTestInt() { return Math.abs(testInt); } + public Map getTestMap() { return testMap == null ? new HashMap<>() : testMap; } } + public void testGets() throws Exception { Map map = new HashMap<>(); map.put("s", 1); assertEquals(1, getEngine().compile("testGets0", "testInt", Gets.CONTEXT, emptyMap()).newInstance("s", -1, null).execute()); - assertEquals(Collections.emptyMap(), - getEngine().compile("testGets1", "testMap", Gets.CONTEXT, emptyMap()).newInstance("s", -1, null).execute()); - assertEquals(Collections.singletonMap("1", "1"), - getEngine().compile("testGets2", "testMap", Gets.CONTEXT, emptyMap()) - .newInstance("s", -1, Collections.singletonMap("1", "1")).execute()); - assertEquals("s", - getEngine().compile("testGets3", "testString", Gets.CONTEXT, emptyMap()).newInstance("s", -1, null).execute()); - assertEquals(map, - getEngine().compile("testGets4", "testMap.put(testString, testInt); testMap", Gets.CONTEXT, emptyMap()) - .newInstance("s", -1, null).execute()); + assertEquals( + Collections.emptyMap(), + getEngine().compile("testGets1", "testMap", Gets.CONTEXT, emptyMap()).newInstance("s", -1, null).execute() + ); + assertEquals( + Collections.singletonMap("1", "1"), + getEngine().compile("testGets2", "testMap", Gets.CONTEXT, emptyMap()) + .newInstance("s", -1, Collections.singletonMap("1", "1")) + .execute() + ); + assertEquals("s", getEngine().compile("testGets3", "testString", Gets.CONTEXT, emptyMap()).newInstance("s", -1, null).execute()); + assertEquals( + map, + getEngine().compile("testGets4", "testMap.put(testString, testInt); testMap", Gets.CONTEXT, emptyMap()) + .newInstance("s", -1, null) + .execute() + ); } public abstract static class NoArgs { @@ -147,17 +158,23 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("noargs", Factory.class); public static final String[] PARAMETERS = new String[] {}; + public abstract Object execute(); } + public void testNoArgs() throws Exception { assertEquals(1, getEngine().compile("testNoArgs0", "1", NoArgs.CONTEXT, emptyMap()).newInstance().execute()); assertEquals("foo", getEngine().compile("testNoArgs1", "'foo'", NoArgs.CONTEXT, emptyMap()).newInstance().execute()); - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> - getEngine().compile("testNoArgs2", "doc", NoArgs.CONTEXT, emptyMap())); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + () -> getEngine().compile("testNoArgs2", "doc", NoArgs.CONTEXT, emptyMap()) + ); assertEquals("cannot resolve symbol [doc]", e.getMessage()); - e = expectScriptThrows(IllegalArgumentException.class, () -> - getEngine().compile("testNoArgs3", "_score", NoArgs.CONTEXT, emptyMap())); + e = expectScriptThrows( + IllegalArgumentException.class, + () -> getEngine().compile("testNoArgs3", "_score", NoArgs.CONTEXT, emptyMap()) + ); assertEquals("cannot resolve symbol [_score]", e.getMessage()); String debug = Debugger.toString(NoArgs.class, "int i = 0", new CompilerSettings()); @@ -172,9 +189,11 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("onearg", Factory.class); - public static final String[] PARAMETERS 
= new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract Object execute(Object arg); } + public void testOneArg() throws Exception { Object rando = randomInt(); assertEquals(rando, getEngine().compile("testOneArg0", "arg", OneArg.CONTEXT, emptyMap()).newInstance().execute(rando)); @@ -189,14 +208,19 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("arrayarg", Factory.class); - public static final String[] PARAMETERS = new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract Object execute(String[] arg); } + public void testArrayArg() throws Exception { String rando = randomAlphaOfLength(5); - assertEquals(rando, - getEngine().compile("testArrayArg0", "arg[0]", ArrayArg.CONTEXT, emptyMap()) - .newInstance().execute(new String[] {rando, "foo"})); + assertEquals( + rando, + getEngine().compile("testArrayArg0", "arg[0]", ArrayArg.CONTEXT, emptyMap()) + .newInstance() + .execute(new String[] { rando, "foo" }) + ); } public abstract static class PrimitiveArrayArg { @@ -206,14 +230,19 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("primitivearrayarg", Factory.class); - public static final String[] PARAMETERS = new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract Object execute(int[] arg); } + public void testPrimitiveArrayArg() throws Exception { int rando = randomInt(); - assertEquals(rando, - getEngine().compile("PrimitiveArrayArg0", "arg[0]", PrimitiveArrayArg.CONTEXT, emptyMap()) - .newInstance().execute(new int[] {rando, 10})); + assertEquals( + rando, + getEngine().compile("PrimitiveArrayArg0", "arg[0]", PrimitiveArrayArg.CONTEXT, emptyMap()) + .newInstance() + .execute(new int[] { rando, 10 }) + ); } public abstract static class DefArrayArg { @@ -223,21 +252,32 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("defarrayarg", Factory.class); - public static final String[] PARAMETERS = new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract Object execute(Object[] arg); } - public void testDefArrayArg()throws Exception { + + public void testDefArrayArg() throws Exception { Object rando = randomInt(); - assertEquals(rando, - getEngine().compile("testDefArray0", "arg[0]", DefArrayArg.CONTEXT, emptyMap()) - .newInstance().execute(new Object[] {rando, 10})); + assertEquals( + rando, + getEngine().compile("testDefArray0", "arg[0]", DefArrayArg.CONTEXT, emptyMap()) + .newInstance() + .execute(new Object[] { rando, 10 }) + ); rando = randomAlphaOfLength(5); - assertEquals(rando, - getEngine().compile("testDefArray1", "arg[0]", DefArrayArg.CONTEXT, emptyMap()) - .newInstance().execute(new Object[] {rando, 10})); - assertEquals(5, getEngine().compile( - "testDefArray2", "arg[0].length()", DefArrayArg.CONTEXT, emptyMap()) - .newInstance().execute(new Object[] {rando, 10})); + assertEquals( + rando, + getEngine().compile("testDefArray1", "arg[0]", DefArrayArg.CONTEXT, emptyMap()) + .newInstance() + .execute(new Object[] { rando, 10 }) + ); + assertEquals( + 5, + getEngine().compile("testDefArray2", "arg[0].length()", DefArrayArg.CONTEXT, emptyMap()) + .newInstance() + .execute(new Object[] { rando, 10 }) + ); } public abstract static class ManyArgs { @@ -247,19 +287,26 @@ public class 
BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("manyargs", Factory.class); - public static final String[] PARAMETERS = new String[] {"a", "b", "c", "d"}; + public static final String[] PARAMETERS = new String[] { "a", "b", "c", "d" }; + public abstract Object execute(int a, int b, int c, int d); + public abstract boolean needsA(); + public abstract boolean needsB(); + public abstract boolean needsC(); + public abstract boolean needsD(); } + public void testManyArgs() throws Exception { int rando = randomInt(); - assertEquals(rando, - getEngine().compile("testManyArgs0", "a", ManyArgs.CONTEXT, emptyMap()).newInstance().execute(rando, 0, 0, 0)); - assertEquals(10, - getEngine().compile("testManyArgs1", "a + b + c + d", ManyArgs.CONTEXT, emptyMap()).newInstance().execute(1, 2, 3, 4)); + assertEquals(rando, getEngine().compile("testManyArgs0", "a", ManyArgs.CONTEXT, emptyMap()).newInstance().execute(rando, 0, 0, 0)); + assertEquals( + 10, + getEngine().compile("testManyArgs1", "a + b + c + d", ManyArgs.CONTEXT, emptyMap()).newInstance().execute(1, 2, 3, 4) + ); // While we're here we can verify that painless correctly finds used variables ManyArgs script = getEngine().compile("testManyArgs2", "a", ManyArgs.CONTEXT, emptyMap()).newInstance(); @@ -286,13 +333,18 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("varargs", Factory.class); - public static final String[] PARAMETERS = new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract Object execute(String... arg); } + public void testVarArgs() throws Exception { - assertEquals("foo bar baz", - getEngine().compile("testVarArgs0", "String.join(' ', Arrays.asList(arg))", VarArgs.CONTEXT, emptyMap()) - .newInstance().execute("foo", "bar", "baz")); + assertEquals( + "foo bar baz", + getEngine().compile("testVarArgs0", "String.join(' ', Arrays.asList(arg))", VarArgs.CONTEXT, emptyMap()) + .newInstance() + .execute("foo", "bar", "baz") + ); } public abstract static class DefaultMethods { @@ -302,31 +354,47 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("defaultmethods", Factory.class); - public static final String[] PARAMETERS = new String[] {"a", "b", "c", "d"}; + public static final String[] PARAMETERS = new String[] { "a", "b", "c", "d" }; + public abstract Object execute(int a, int b, int c, int d); + public Object executeWithOne() { return execute(1, 1, 1, 1); } + public Object executeWithASingleOne(int a, int b, int c) { return execute(a, b, c, 1); } } + public void testDefaultMethods() throws Exception { int rando = randomInt(); - assertEquals(rando, - getEngine().compile("testDefaultMethods0", "a", DefaultMethods.CONTEXT, emptyMap()).newInstance().execute(rando, 0, 0, 0)); - assertEquals(rando, - getEngine().compile("testDefaultMethods1", "a", DefaultMethods.CONTEXT, emptyMap()) - .newInstance().executeWithASingleOne(rando, 0, 0)); - assertEquals(10, - getEngine().compile("testDefaultMethods2", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()) - .newInstance().execute(1, 2, 3, 4)); - assertEquals(4, - getEngine().compile("testDefaultMethods3", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()) - .newInstance().executeWithOne()); - assertEquals(7, - getEngine().compile("testDefaultMethods4", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()) - .newInstance().executeWithASingleOne(1, 2, 
3)); + assertEquals( + rando, + getEngine().compile("testDefaultMethods0", "a", DefaultMethods.CONTEXT, emptyMap()).newInstance().execute(rando, 0, 0, 0) + ); + assertEquals( + rando, + getEngine().compile("testDefaultMethods1", "a", DefaultMethods.CONTEXT, emptyMap()) + .newInstance() + .executeWithASingleOne(rando, 0, 0) + ); + assertEquals( + 10, + getEngine().compile("testDefaultMethods2", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()) + .newInstance() + .execute(1, 2, 3, 4) + ); + assertEquals( + 4, + getEngine().compile("testDefaultMethods3", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()).newInstance().executeWithOne() + ); + assertEquals( + 7, + getEngine().compile("testDefaultMethods4", "a + b + c + d", DefaultMethods.CONTEXT, emptyMap()) + .newInstance() + .executeWithASingleOne(1, 2, 3) + ); } public abstract static class ReturnsVoid { @@ -336,9 +404,11 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("returnsvoid", Factory.class); - public static final String[] PARAMETERS = new String[] {"map"}; + public static final String[] PARAMETERS = new String[] { "map" }; + public abstract void execute(Map map); } + public void testReturnsVoid() throws Exception { Map map = new HashMap<>(); getEngine().compile("testReturnsVoid0", "map.a = 'foo'", ReturnsVoid.CONTEXT, emptyMap()).newInstance().execute(map); @@ -361,62 +431,96 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("returnsprimitiveboolean", Factory.class); public static final String[] PARAMETERS = new String[] {}; + public abstract boolean execute(); } + public void testReturnsPrimitiveBoolean() throws Exception { assertTrue( - getEngine().compile("testReturnsPrimitiveBoolean0", "true", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean0", "true", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()).newInstance().execute() + ); assertFalse( - getEngine().compile("testReturnsPrimitiveBoolean1", "false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean1", "false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertTrue( - getEngine().compile("testReturnsPrimitiveBoolean2", "Boolean.TRUE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean2", "Boolean.TRUE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertFalse( - getEngine().compile("testReturnsPrimitiveBoolean3", "Boolean.FALSE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean3", "Boolean.FALSE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertTrue( - getEngine().compile("testReturnsPrimitiveBoolean4", "def i = true; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean4", "def i = true; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertTrue( - getEngine().compile("testReturnsPrimitiveBoolean5", "def i = Boolean.TRUE; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean5", "def i = Boolean.TRUE; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + 
.newInstance() + .execute() + ); assertTrue( - getEngine().compile("testReturnsPrimitiveBoolean6", "true || false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean6", "true || false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings()); assertThat(debug, containsString("ICONST_0")); // The important thing here is that we have the bytecode for returning an integer instead of an object. booleans are integers. assertThat(debug, containsString("IRETURN")); - Exception e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean7", "1L",ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + Exception e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean7", "1L", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertEquals("Cannot cast from [long] to [boolean].", e.getMessage()); - e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean8", "1.1f", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean8", "1.1f", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertEquals("Cannot cast from [float] to [boolean].", e.getMessage()); - e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean9", "1.1d", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean9", "1.1d", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertEquals("Cannot cast from [double] to [boolean].", e.getMessage()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean10", "def i = 1L; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean11", "def i = 1.1f; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveBoolean12", "def i = 1.1d; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean10", "def i = 1L; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean11", "def i = 1.1f; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveBoolean12", "def i = 1.1d; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertFalse( - getEngine().compile("testReturnsPrimitiveBoolean13", "int i = 0", ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) - .newInstance().execute()); + getEngine().compile("testReturnsPrimitiveBoolean13", "int i = 0", 
ReturnsPrimitiveBoolean.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); } public abstract static class ReturnsPrimitiveInt { @@ -427,60 +531,93 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("returnsprimitiveint", Factory.class); public static final String[] PARAMETERS = new String[] {}; + public abstract int execute(); } + public void testReturnsPrimitiveInt() throws Exception { - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt0", "1", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt1", "(int) 1L", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - assertEquals(1, getEngine().compile("testReturnsPrimitiveInt2", "(int) 1.1d", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt3", "(int) 1.1f", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt4", "Integer.valueOf(1)", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt0", "1", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt1", "(int) 1L", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt2", "(int) 1.1d", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt3", "(int) 1.1f", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt4", "Integer.valueOf(1)", ReturnsPrimitiveInt.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt5", "def i = 1; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - assertEquals(1, - getEngine().compile("testReturnsPrimitiveInt6", "def i = Integer.valueOf(1); i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt5", "def i = 1; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); + assertEquals( + 1, + getEngine().compile("testReturnsPrimitiveInt6", "def i = Integer.valueOf(1); i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); - assertEquals(2, - getEngine().compile("testReturnsPrimitiveInt7", "1 + 1", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute()); + assertEquals( + 2, + getEngine().compile("testReturnsPrimitiveInt7", "1 + 1", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings()); assertThat(debug, containsString("ICONST_1")); // The important thing here is that we have the bytecode for returning an integer instead of an object assertThat(debug, containsString("IRETURN")); - Exception e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt8", "1L", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute()); + Exception e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt8", "1L", 
ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); assertEquals("Cannot cast from [long] to [int].", e.getMessage()); - e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt9", "1.1f", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute()); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt9", "1.1f", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); assertEquals("Cannot cast from [float] to [int].", e.getMessage()); - e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt10", "1.1d", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute()); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt10", "1.1d", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); assertEquals("Cannot cast from [double] to [int].", e.getMessage()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt11", "def i = 1L; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt12", "def i = 1.1f; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); - expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveInt13", "def i = 1.1d; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt11", "def i = 1L; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt12", "def i = 1.1f; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); + expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveInt13", "def i = 1.1d; i", ReturnsPrimitiveInt.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); - assertEquals(0, getEngine().compile("testReturnsPrimitiveInt14", "int i = 0", ReturnsPrimitiveInt.CONTEXT, emptyMap()) - .newInstance().execute()); + assertEquals( + 0, + getEngine().compile("testReturnsPrimitiveInt14", "int i = 0", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() + ); } public abstract static class ReturnsPrimitiveFloat { @@ -491,107 +628,199 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("returnsprimitivefloat", Factory.class); public static final String[] PARAMETERS = new String[] {}; + public abstract float execute(); } + public void testReturnsPrimitiveFloat() throws Exception { - assertEquals(1.1f, - getEngine().compile("testReturnsPrimitiveFloat0", "1.1f", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1f, - getEngine().compile("testReturnsPrimitiveFloat1", "(float) 1.1d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1f, - getEngine().compile("testReturnsPrimitiveFloat2", "def d = 1.1f; d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1f, getEngine().compile( - "testReturnsPrimitiveFloat3", "def d = Float.valueOf(1.1f); d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - 
.newInstance().execute(), 0); + assertEquals( + 1.1f, + getEngine().compile("testReturnsPrimitiveFloat0", "1.1f", ReturnsPrimitiveFloat.CONTEXT, emptyMap()).newInstance().execute(), + 0 + ); + assertEquals( + 1.1f, + getEngine().compile("testReturnsPrimitiveFloat1", "(float) 1.1d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + 1.1f, + getEngine().compile("testReturnsPrimitiveFloat2", "def d = 1.1f; d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + 1.1f, + getEngine().compile("testReturnsPrimitiveFloat3", "def d = Float.valueOf(1.1f); d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); - assertEquals(1.1f + 6.7f, - getEngine().compile("testReturnsPrimitiveFloat4", "1.1f + 6.7f", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 1.1f + 6.7f, + getEngine().compile("testReturnsPrimitiveFloat4", "1.1f + 6.7f", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); - Exception e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveFloat5", "1.1d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute()); + Exception e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveFloat5", "1.1d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); assertEquals("Cannot cast from [double] to [float].", e.getMessage()); - e = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("testReturnsPrimitiveFloat6", "def d = 1.1d; d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute()); - e = expectScriptThrows(ClassCastException.class, () -> getEngine().compile( - "testReturnsPrimitiveFloat7", "def d = Double.valueOf(1.1); d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute()); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("testReturnsPrimitiveFloat6", "def d = 1.1d; d", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute() + ); + e = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile( + "testReturnsPrimitiveFloat7", + "def d = Double.valueOf(1.1); d", + ReturnsPrimitiveFloat.CONTEXT, + emptyMap() + ).newInstance().execute() + ); String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings()); assertThat(debug, containsString("FCONST_1")); // The important thing here is that we have the bytecode for returning a float instead of an object assertThat(debug, containsString("FRETURN")); - assertEquals(0.0f, - getEngine().compile("testReturnsPrimitiveFloat8", "int i = 0", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 0.0f, + getEngine().compile("testReturnsPrimitiveFloat8", "int i = 0", ReturnsPrimitiveFloat.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); } - public abstract static class ReturnsPrimitiveDouble { - public interface Factory { - ReturnsPrimitiveDouble newInstance(); - } + public abstract static class ReturnsPrimitiveDouble { + public interface Factory { + ReturnsPrimitiveDouble newInstance(); + } - public static final ScriptContext CONTEXT = new ScriptContext<>("returnsprimitivedouble", Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("returnsprimitivedouble", Factory.class); public static final String[] 
PARAMETERS = new String[] {}; + public abstract double execute(); } + public void testReturnsPrimitiveDouble() throws Exception { - assertEquals(1.0, - getEngine().compile("testReturnsPrimitiveDouble0", "1", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.0, - getEngine().compile("testReturnsPrimitiveDouble1", "1L", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1, - getEngine().compile("testReturnsPrimitiveDouble2", "1.1d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals((double) 1.1f, - getEngine().compile("testReturnsPrimitiveDouble3", "1.1f", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1, getEngine().compile( - "testReturnsPrimitiveDouble4", "Double.valueOf(1.1)", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals((double) 1.1f, getEngine().compile( - "testReturnsPrimitiveDouble5", "Float.valueOf(1.1f)", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 1.0, + getEngine().compile("testReturnsPrimitiveDouble0", "1", ReturnsPrimitiveDouble.CONTEXT, emptyMap()).newInstance().execute(), + 0 + ); + assertEquals( + 1.0, + getEngine().compile("testReturnsPrimitiveDouble1", "1L", ReturnsPrimitiveDouble.CONTEXT, emptyMap()).newInstance().execute(), + 0 + ); + assertEquals( + 1.1, + getEngine().compile("testReturnsPrimitiveDouble2", "1.1d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()).newInstance().execute(), + 0 + ); + assertEquals( + (double) 1.1f, + getEngine().compile("testReturnsPrimitiveDouble3", "1.1f", ReturnsPrimitiveDouble.CONTEXT, emptyMap()).newInstance().execute(), + 0 + ); + assertEquals( + 1.1, + getEngine().compile("testReturnsPrimitiveDouble4", "Double.valueOf(1.1)", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + (double) 1.1f, + getEngine().compile("testReturnsPrimitiveDouble5", "Float.valueOf(1.1f)", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); - assertEquals(1.0, - getEngine().compile("testReturnsPrimitiveDouble6", "def d = 1; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.0, - getEngine().compile("testReturnsPrimitiveDouble7", "def d = 1L; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1, - getEngine().compile("testReturnsPrimitiveDouble8", "def d = 1.1d; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()). 
- newInstance().execute(), 0); - assertEquals((double) 1.1f, - getEngine().compile("testReturnsPrimitiveDouble9", "def d = 1.1f; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals(1.1, getEngine().compile( - "testReturnsPrimitiveDouble10", "def d = Double.valueOf(1.1); d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); - assertEquals((double) 1.1f, getEngine().compile( - "testReturnsPrimitiveDouble11", "def d = Float.valueOf(1.1f); d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 1.0, + getEngine().compile("testReturnsPrimitiveDouble6", "def d = 1; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + 1.0, + getEngine().compile("testReturnsPrimitiveDouble7", "def d = 1L; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + 1.1, + getEngine().compile("testReturnsPrimitiveDouble8", "def d = 1.1d; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + (double) 1.1f, + getEngine().compile("testReturnsPrimitiveDouble9", "def d = 1.1f; d", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); + assertEquals( + 1.1, + getEngine().compile( + "testReturnsPrimitiveDouble10", + "def d = Double.valueOf(1.1); d", + ReturnsPrimitiveDouble.CONTEXT, + emptyMap() + ).newInstance().execute(), + 0 + ); + assertEquals( + (double) 1.1f, + getEngine().compile( + "testReturnsPrimitiveDouble11", + "def d = Float.valueOf(1.1f); d", + ReturnsPrimitiveDouble.CONTEXT, + emptyMap() + ).newInstance().execute(), + 0 + ); - assertEquals(1.1 + 6.7, - getEngine().compile("testReturnsPrimitiveDouble12", "1.1 + 6.7", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 1.1 + 6.7, + getEngine().compile("testReturnsPrimitiveDouble12", "1.1 + 6.7", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings()); // The important thing here is that we have the bytecode for returning a double instead of an object assertThat(debug, containsString("DRETURN")); - assertEquals(0.0, - getEngine().compile("testReturnsPrimitiveDouble13", "int i = 0", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) - .newInstance().execute(), 0); + assertEquals( + 0.0, + getEngine().compile("testReturnsPrimitiveDouble13", "int i = 0", ReturnsPrimitiveDouble.CONTEXT, emptyMap()) + .newInstance() + .execute(), + 0 + ); } public abstract static class NoArgsConstant { @@ -603,12 +832,22 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String foo); } + public void testNoArgsConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testNoArgsConstant0", "1", NoArgsConstant.CONTEXT, emptyMap()).newInstance().execute("constant")); - assertThat(e.getMessage(), startsWith( + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testNoArgsConstant0", "1", NoArgsConstant.CONTEXT, emptyMap()).newInstance().execute("constant") + ); + assertThat( + e.getMessage(), + startsWith( "Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " - + "names of the method arguments but [" + NoArgsConstant.class.getName() + "] doesn't have one.")); + + 
"names of the method arguments but [" + + NoArgsConstant.class.getName() + + "] doesn't have one." + ) + ); } public abstract static class WrongArgsConstant { @@ -618,15 +857,26 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("wrongargscontext", Factory.class); - boolean[] PARAMETERS = new boolean[] {false}; + boolean[] PARAMETERS = new boolean[] { false }; + public abstract Object execute(String foo); } + public void testWrongArgsConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testWrongArgsConstant0", "1", WrongArgsConstant.CONTEXT, emptyMap())); - assertThat(e.getMessage(), startsWith( + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testWrongArgsConstant0", "1", WrongArgsConstant.CONTEXT, emptyMap()) + ); + assertThat( + e.getMessage(), + startsWith( "Painless needs a constant [String[] PARAMETERS] on all interfaces it implements with the " - + "names of the method arguments but [" + WrongArgsConstant.class.getName() + "] doesn't have one.")); + + "names of the method arguments but [" + + WrongArgsConstant.class.getName() + + "] doesn't have one." + ) + ); } public abstract static class WrongLengthOfArgConstant { @@ -636,14 +886,27 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("wronglengthofargcontext", Factory.class); - public static final String[] PARAMETERS = new String[] {"foo", "bar"}; + public static final String[] PARAMETERS = new String[] { "foo", "bar" }; + public abstract Object execute(String foo); } + public void testWrongLengthOfArgConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testWrongLengthOfArgConstant", "1", WrongLengthOfArgConstant.CONTEXT, emptyMap())); - assertThat(e.getMessage(), startsWith("[" + WrongLengthOfArgConstant.class.getName() + "#ARGUMENTS] has length [2] but [" - + WrongLengthOfArgConstant.class.getName() + "#execute] takes [1] argument.")); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testWrongLengthOfArgConstant", "1", WrongLengthOfArgConstant.CONTEXT, emptyMap()) + ); + assertThat( + e.getMessage(), + startsWith( + "[" + + WrongLengthOfArgConstant.class.getName() + + "#ARGUMENTS] has length [2] but [" + + WrongLengthOfArgConstant.class.getName() + + "#execute] takes [1] argument." + ) + ); } public abstract static class UnknownArgType { @@ -653,14 +916,24 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("unknownargtype", Factory.class); - public static final String[] PARAMETERS = new String[] {"foo"}; + public static final String[] PARAMETERS = new String[] { "foo" }; + public abstract Object execute(UnknownArgType foo); } + public void testUnknownArgType() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testUnknownArgType0", "1", UnknownArgType.CONTEXT, emptyMap())); - assertEquals("[foo] is of unknown type [" + UnknownArgType.class.getName() + ". 
Painless interfaces can only accept arguments " - + "that are of whitelisted types.", e.getMessage()); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testUnknownArgType0", "1", UnknownArgType.CONTEXT, emptyMap()) + ); + assertEquals( + "[foo] is of unknown type [" + + UnknownArgType.class.getName() + + ". Painless interfaces can only accept arguments " + + "that are of whitelisted types.", + e.getMessage() + ); } public abstract static class UnknownReturnType { @@ -670,14 +943,25 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("unknownreturntype", Factory.class); - public static final String[] PARAMETERS = new String[] {"foo"}; + public static final String[] PARAMETERS = new String[] { "foo" }; + public abstract UnknownReturnType execute(String foo); } + public void testUnknownReturnType() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testUnknownReturnType0", "1", UnknownReturnType.CONTEXT, emptyMap())); - assertEquals("Painless can only implement execute methods returning a whitelisted type but [" + UnknownReturnType.class.getName() - + "#execute] returns [" + UnknownReturnType.class.getName() + "] which isn't whitelisted.", e.getMessage()); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testUnknownReturnType0", "1", UnknownReturnType.CONTEXT, emptyMap()) + ); + assertEquals( + "Painless can only implement execute methods returning a whitelisted type but [" + + UnknownReturnType.class.getName() + + "#execute] returns [" + + UnknownReturnType.class.getName() + + "] which isn't whitelisted.", + e.getMessage() + ); } public abstract static class UnknownArgTypeInArray { @@ -687,14 +971,24 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("unknownargtypeinarray", Factory.class); - public static final String[] PARAMETERS = new String[] {"foo"}; + public static final String[] PARAMETERS = new String[] { "foo" }; + public abstract Object execute(UnknownArgTypeInArray[] foo); } + public void testUnknownArgTypeInArray() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testUnknownAryTypeInArray0", "1", UnknownArgTypeInArray.CONTEXT, emptyMap())); - assertEquals("[foo] is of unknown type [" + UnknownArgTypeInArray.class.getName() + ". Painless interfaces can only accept " - + "arguments that are of whitelisted types.", e.getMessage()); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testUnknownAryTypeInArray0", "1", UnknownArgTypeInArray.CONTEXT, emptyMap()) + ); + assertEquals( + "[foo] is of unknown type [" + + UnknownArgTypeInArray.class.getName() + + ". 
Painless interfaces can only accept " + + "arguments that are of whitelisted types.", + e.getMessage() + ); } public abstract static class TwoExecuteMethods { @@ -705,12 +999,21 @@ public class BaseClassTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("twoexecutemethods", Factory.class); public abstract Object execute(); + public abstract Object execute(boolean foo); } + public void testTwoExecuteMethods() { - Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> - getEngine().compile("testTwoExecuteMethods0", "null", TwoExecuteMethods.CONTEXT, emptyMap())); - assertEquals("Painless can only implement interfaces that have a single method named [execute] but [" - + TwoExecuteMethods.class.getName() + "] has more than one.", e.getMessage()); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> getEngine().compile("testTwoExecuteMethods0", "null", TwoExecuteMethods.CONTEXT, emptyMap()) + ); + assertEquals( + "Painless can only implement interfaces that have a single method named [execute] but [" + + TwoExecuteMethods.class.getName() + + "] has more than one.", + e.getMessage() + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicAPITests.java index 6751ae21a27..c4cd7503bbb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicAPITests.java @@ -41,28 +41,68 @@ import java.util.regex.Pattern; public class BasicAPITests extends ScriptTestCase { public void testListIterator() { - assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + - "def total = 0; while (y.hasNext()) total += y.next(); return total;")); + assertEquals( + 3, + exec( + "List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += y.next(); return total;" + ) + ); + assertEquals( + "abc", + exec( + "List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;" + ) + ); + assertEquals( + 3, + exec( + "def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + + "def total = 0; while (y.hasNext()) total += y.next(); return total;" + ) + ); } public void testSetIterator() { - assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + - "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + assertEquals( + 3, + exec( + 
"Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += y.next(); return total;" + ) + ); + assertEquals( + "abc", + exec( + "Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;" + ) + ); + assertEquals( + 3, + exec( + "def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + + "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;" + ) + ); } public void testMapIterator() { - assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.keySet().iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); - assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + assertEquals( + 3, + exec( + "Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.keySet().iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;" + ) + ); + assertEquals( + 3, + exec( + "Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;" + ) + ); } /** Test loads and stores with a map */ @@ -83,8 +123,7 @@ public class BasicAPITests extends ScriptTestCase { ctx.put("_source", _source); params.put("ctx", ctx); - assertEquals("testvalue", exec("params.ctx._source['load'].5 = params.ctx._source['load'].remove('load5')", - params, true)); + assertEquals("testvalue", exec("params.ctx._source['load'].5 = params.ctx._source['load'].remove('load5')", params, true)); } /** Test loads and stores with a list */ @@ -143,8 +182,12 @@ public class BasicAPITests extends ScriptTestCase { } public void testPublicMemberAccess() { - assertEquals(5, exec("org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + - "ft.z = 5; return ft.z;")); + assertEquals( + 5, + exec( + "org.opensearch.painless.FeatureTestObject ft = new org.opensearch.painless.FeatureTestObject();" + "ft.z = 5; return ft.z;" + ) + ); } public void testNoSemicolon() { @@ -158,27 +201,32 @@ public class BasicAPITests extends ScriptTestCase { // TODO: remove this when the transition from Joda to Java datetimes is completed public void testJCZDTToZonedDateTime() { - assertEquals(0L, exec( - "Instant instant = Instant.ofEpochMilli(434931330000L);" + - "JodaCompatibleZonedDateTime d = new JodaCompatibleZonedDateTime(instant, ZoneId.of('Z'));" + - "ZonedDateTime t = d;" + - "return ChronoUnit.MILLIS.between(d, t);" - )); + assertEquals( + 0L, + exec( + "Instant instant = Instant.ofEpochMilli(434931330000L);" + + "JodaCompatibleZonedDateTime d = new JodaCompatibleZonedDateTime(instant, ZoneId.of('Z'));" + + "ZonedDateTime t = d;" + + "return ChronoUnit.MILLIS.between(d, t);" + ) + ); } public void testRandomUUID() { assertTrue( - Pattern.compile("\\p{XDigit}{8}(-\\p{XDigit}{4}){3}-\\p{XDigit}{12}").matcher( - (String)exec( - "UUID a = UUID.randomUUID();" + - "String s = a.toString(); " + - "UUID b = UUID.fromString(s);" + - "if (a.equals(b) == false) {" + - " throw new RuntimeException('uuids did not match');" + - "}" + - "return s;" + 
Pattern.compile("\\p{XDigit}{8}(-\\p{XDigit}{4}){3}-\\p{XDigit}{12}") + .matcher( + (String) exec( + "UUID a = UUID.randomUUID();" + + "String s = a.toString(); " + + "UUID b = UUID.fromString(s);" + + "if (a.equals(b) == false) {" + + " throw new RuntimeException('uuids did not match');" + + "}" + + "return s;" ) - ).matches() + ) + .matches() ); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicExpressionTests.java index 4304a2977ff..9011a68f82f 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicExpressionTests.java @@ -52,8 +52,8 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals(33.0F, exec("return 33f")); assertEquals(34.0F, exec("return 34.0F")); assertEquals(35.0F, exec("return 35F")); - assertEquals((byte)255, exec("return (byte)255")); - assertEquals((short)5, exec("return (short)5")); + assertEquals((byte) 255, exec("return (byte)255")); + assertEquals((short) 5, exec("return (short)5")); assertEquals("string", exec("return \"string\"")); assertEquals("string", exec("return 'string'")); assertEquals(true, exec("return true")); @@ -77,12 +77,16 @@ public class BasicExpressionTests extends ScriptTestCase { // `\"` is a `"` if surrounded by `"`s assertEquals("\"string", exec("\"\\\"string\"")); Exception e = expectScriptThrows(IllegalArgumentException.class, () -> exec("'\\\"string'", false)); - assertEquals("unexpected character ['\\\"]. The only valid escape sequences in strings starting with ['] are [\\\\] and [\\'].", - e.getMessage()); + assertEquals( + "unexpected character ['\\\"]. The only valid escape sequences in strings starting with ['] are [\\\\] and [\\'].", + e.getMessage() + ); // `\'` is a `'` if surrounded by `'`s e = expectScriptThrows(IllegalArgumentException.class, () -> exec("\"\\'string\"", false)); - assertEquals("unexpected character [\"\\']. The only valid escape sequences in strings starting with [\"] are [\\\\] and [\\\"].", - e.getMessage()); + assertEquals( + "unexpected character [\"\\']. 
The only valid escape sequences in strings starting with [\"] are [\\\\] and [\\\"].", + e.getMessage() + ); assertEquals("'string", exec("'\\'string'")); // We don't break native escapes like new line assertEquals("\nstring", exec("\"\nstring\"")); @@ -106,8 +110,8 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals(7L, exec("long l = 7; return l;")); assertEquals(7.0, exec("double d = 7; return d;")); assertEquals(32.0F, exec("float f = 32F; return f;")); - assertEquals((byte)255, exec("byte b = (byte)255; return b;")); - assertEquals((short)5, exec("short s = (short)5; return s;")); + assertEquals((byte) 255, exec("byte b = (byte)255; return b;")); + assertEquals((short) 5, exec("short s = (short)5; return s;")); assertEquals("string", exec("String s = \"string\"; return s;")); assertEquals(true, exec("boolean v = true; return v;")); assertEquals(false, exec("boolean v = false; return v;")); @@ -115,24 +119,16 @@ public class BasicExpressionTests extends ScriptTestCase { public void testCast() { assertEquals(1, exec("return (int)1.0;")); - assertEquals((byte)100, exec("double x = 100; return (byte)x;")); + assertEquals((byte) 100, exec("double x = 100; return (byte)x;")); - assertEquals(3, exec( - "Map x = new HashMap();\n" + - "Object y = x;\n" + - "((Map)y).put(2, 3);\n" + - "return x.get(2);\n")); + assertEquals(3, exec("Map x = new HashMap();\n" + "Object y = x;\n" + "((Map)y).put(2, 3);\n" + "return x.get(2);\n")); } public void testIllegalDefCast() { - Exception exception = expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 1.0; int y = x; return y;"); - }); + Exception exception = expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1.0; int y = x; return y;"); }); assertTrue(exception.getMessage().contains("cannot implicitly cast")); - exception = expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (short)1; byte y = x; return y;"); - }); + exception = expectScriptThrows(ClassCastException.class, () -> { exec("def x = (short)1; byte y = x; return y;"); }); assertTrue(exception.getMessage().contains("cannot implicitly cast")); } @@ -140,11 +136,10 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals("aaabbb", exec("return \"aaa\" + \"bbb\";")); assertEquals("aaabbb", exec("String aaa = \"aaa\", bbb = \"bbb\"; return aaa + bbb;")); - assertEquals("aaabbbbbbbbb", exec( - "String aaa = \"aaa\", bbb = \"bbb\"; int x;\n" + - "for (; x < 3; ++x) \n" + - " aaa += bbb;\n" + - "return aaa;")); + assertEquals( + "aaabbbbbbbbb", + exec("String aaa = \"aaa\", bbb = \"bbb\"; int x;\n" + "for (; x < 3; ++x) \n" + " aaa += bbb;\n" + "return aaa;") + ); } public void testComp() { @@ -193,88 +188,83 @@ public class BasicExpressionTests extends ScriptTestCase { public void testNullSafeDeref() { // Objects in general - // Call - assertNull( exec("String a = null; return a?.toString()")); + // Call + assertNull(exec("String a = null; return a?.toString()")); assertEquals("foo", exec("String a = 'foo'; return a?.toString()")); - assertNull( exec("def a = null; return a?.toString()")); + assertNull(exec("def a = null; return a?.toString()")); assertEquals("foo", exec("def a = 'foo'; return a?.toString()")); - // Call with primitive result - assertMustBeNullable( "String a = null; return a?.length()"); - assertMustBeNullable( "String a = 'foo'; return a?.length()"); - assertNull( exec("def a = null; return a?.length()")); - assertEquals(3, exec("def a = 'foo'; return a?.length()")); - // Read shortcut - 
assertMustBeNullable( "org.opensearch.painless.FeatureTestObject a = null; return a?.x"); - assertMustBeNullable( - "org.opensearch.painless.FeatureTestObject a = new org.opensearch.painless.FeatureTestObject(); return a?.x"); - assertNull( exec("def a = null; return a?.x")); - assertEquals(0, exec("def a = new org.opensearch.painless.FeatureTestObject(); return a?.x")); + // Call with primitive result + assertMustBeNullable("String a = null; return a?.length()"); + assertMustBeNullable("String a = 'foo'; return a?.length()"); + assertNull(exec("def a = null; return a?.length()")); + assertEquals(3, exec("def a = 'foo'; return a?.length()")); + // Read shortcut + assertMustBeNullable("org.opensearch.painless.FeatureTestObject a = null; return a?.x"); + assertMustBeNullable("org.opensearch.painless.FeatureTestObject a = new org.opensearch.painless.FeatureTestObject(); return a?.x"); + assertNull(exec("def a = null; return a?.x")); + assertEquals(0, exec("def a = new org.opensearch.painless.FeatureTestObject(); return a?.x")); // Maps - // Call - assertNull( exec("Map a = null; return a?.toString()")); + // Call + assertNull(exec("Map a = null; return a?.toString()")); assertEquals("{}", exec("Map a = [:]; return a?.toString()")); - assertNull( exec("def a = null; return a?.toString()")); + assertNull(exec("def a = null; return a?.toString()")); assertEquals("{}", exec("def a = [:]; return a?.toString()")); - // Call with primitive result - assertMustBeNullable( "Map a = [:]; return a?.size()"); - assertMustBeNullable( "Map a = null; return a?.size()"); - assertNull( exec("def a = null; return a?.size()")); - assertEquals(0, exec("def a = [:]; return a?.size()")); - // Read shortcut - assertNull( exec("Map a = null; return a?.other")); // Read shortcut - assertEquals(1, exec("Map a = ['other':1]; return a?.other")); // Read shortcut - assertNull( exec("def a = null; return a?.other")); // Read shortcut - assertEquals(1, exec("def a = ['other':1]; return a?.other")); // Read shortcut + // Call with primitive result + assertMustBeNullable("Map a = [:]; return a?.size()"); + assertMustBeNullable("Map a = null; return a?.size()"); + assertNull(exec("def a = null; return a?.size()")); + assertEquals(0, exec("def a = [:]; return a?.size()")); + // Read shortcut + assertNull(exec("Map a = null; return a?.other")); // Read shortcut + assertEquals(1, exec("Map a = ['other':1]; return a?.other")); // Read shortcut + assertNull(exec("def a = null; return a?.other")); // Read shortcut + assertEquals(1, exec("def a = ['other':1]; return a?.other")); // Read shortcut // Array // Since you can't invoke methods on arrays we skip the toString and hashCode tests assertMustBeNullable("int[] a = null; return a?.length"); assertMustBeNullable("int[] a = new int[] {2, 3}; return a?.length"); - assertNull( exec("def a = null; return a?.length")); + assertNull(exec("def a = null; return a?.length")); assertEquals(2, exec("def a = new int[] {2, 3}; return a?.length")); // Results from maps (should just work but let's test anyway) FeatureTestObject t = new FeatureTestObject(); - assertNull( exec("Map a = ['thing': params.t]; return a.other?.getX()", singletonMap("t", t), true)); - assertNull( exec("Map a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); - assertNull( exec("def a = ['thing': params.t]; return a.other?.getX()", singletonMap("t", t), true)); - assertNull( exec("def a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertNull(exec("Map a = ['thing': 
params.t]; return a.other?.getX()", singletonMap("t", t), true)); + assertNull(exec("Map a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertNull(exec("def a = ['thing': params.t]; return a.other?.getX()", singletonMap("t", t), true)); + assertNull(exec("def a = ['thing': params.t]; return a.other?.x", singletonMap("t", t), true)); assertEquals(0, exec("Map a = ['other': params.t]; return a.other?.getX()", singletonMap("t", t), true)); - assertEquals(0, exec("Map a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); assertEquals(0, exec("def a = ['other': params.t]; return a.other?.getX()", singletonMap("t", t), true)); - assertEquals(0, exec("def a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); + assertEquals(0, exec("def a = ['other': params.t]; return a.other?.x", singletonMap("t", t), true)); // Chains - assertNull( exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); - assertNull( exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); - assertNull( exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); - assertNull( exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertNull(exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertNull(exec("Map a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertNull(exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); + assertNull(exec("def a = ['thing': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); assertEquals(0, exec("Map a = ['other': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); - assertEquals(0, exec("Map a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertEquals(0, exec("Map a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); - assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); + assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); // Assignments - assertNull(exec( - "def a = [:];\n" - + "a.missing_length = a.missing?.length();\n" - + "return a.missing_length", true)); - assertEquals(3, exec( - "def a = [:];\n" - + "a.missing = 'foo';\n" - + "a.missing_length = a.missing?.length();\n" - + "return a.missing_length", true)); + assertNull(exec("def a = [:];\n" + "a.missing_length = a.missing?.length();\n" + "return a.missing_length", true)); + assertEquals( + 3, + exec("def a = [:];\n" + "a.missing = 'foo';\n" + "a.missing_length = a.missing?.length();\n" + "return a.missing_length", true) + ); // Writes, all unsupported at this point -// assertEquals(null, exec("org.opensearch.painless.FeatureTestObject a = null; return a?.x")); // Read field -// assertEquals(null, exec("org.opensearch.painless.FeatureTestObject a = null; a?.x = 7; return a?.x")); // Write field -// assertEquals(null, exec("Map a = null; a?.other = 'wow'; return a?.other")); 
// Write shortcut -// assertEquals(null, exec("def a = null; a?.other = 'cat'; return a?.other")); // Write shortcut -// assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat = 'no'; return a.other?.cat")); -// assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat = 'no'; return a.other?.cat")); -// assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); -// assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); + // assertEquals(null, exec("org.opensearch.painless.FeatureTestObject a = null; return a?.x")); // Read field + // assertEquals(null, exec("org.opensearch.painless.FeatureTestObject a = null; a?.x = 7; return a?.x")); // Write field + // assertEquals(null, exec("Map a = null; a?.other = 'wow'; return a?.other")); // Write shortcut + // assertEquals(null, exec("def a = null; a?.other = 'cat'; return a?.other")); // Write shortcut + // assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat = 'no'; return a.other?.cat")); + // assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat = 'no'; return a.other?.cat")); + // assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); + // assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); } // test to ensure static interface methods are called correctly diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicStatementTests.java index f216dc3dac8..403bd12147c 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BasicStatementTests.java @@ -72,18 +72,22 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(2, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 2; else return 0;")); assertEquals(1, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 1; else return 0;")); - assertEquals(3, exec( - "int x = 5;\n" + - "if (x == 5) {\n" + - " int y = 2;\n" + - " \n" + - " if (y == 2) {\n" + - " x = 3;\n" + - " }\n" + - " \n" + - "}\n" + - "\n" + - "return x;\n")); + assertEquals( + 3, + exec( + "int x = 5;\n" + + "if (x == 5) {\n" + + " int y = 2;\n" + + " \n" + + " if (y == 2) {\n" + + " x = 3;\n" + + " }\n" + + " \n" + + "}\n" + + "\n" + + "return x;\n" + ) + ); } public void testWhileStatement() { @@ -91,27 +95,28 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals("aaaaaa", exec("String c = \"a\"; int x; while (x < 5) { c += \"a\"; ++x; } return c;")); Object value = exec( - " byte[][] b = new byte[5][5]; \n" + - " byte x = 0, y; \n" + - " \n" + - " while (x < 5) { \n" + - " y = 0; \n" + - " \n" + - " while (y < 5) { \n" + - " b[x][y] = (byte)(x*y); \n" + - " ++y; \n" + - " } \n" + - " \n" + - " ++x; \n" + - " } \n" + - " \n" + - " return b; \n"); + " byte[][] b = new byte[5][5]; \n" + + " byte x = 0, y; \n" + + " \n" + + " while (x < 5) { \n" + + " y = 0; \n" + + " \n" + + " while (y < 5) { \n" + + " b[x][y] = (byte)(x*y); \n" + + " ++y; \n" + + " } \n" + + " \n" + + " ++x; \n" + + " } \n" + + " \n" + + " return b; \n" + ); - byte[][] b = (byte[][])value; + byte[][] b = (byte[][]) value; for (byte x = 0; x < 5; ++x) { for (byte y = 0; y < 5; ++y) { - assertEquals(x*y, b[x][y]); + assertEquals(x * y, b[x][y]); } } 
} @@ -120,27 +125,28 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals("aaaaaa", exec("String c = \"a\"; int x; do { c += \"a\"; ++x; } while (x < 5); return c;")); Object value = exec( - " int[][] b = new int[5][5]; \n" + - " int x = 0, y; \n" + - " \n" + - " do { \n" + - " y = 0; \n" + - " \n" + - " do { \n" + - " b[x][y] = x*y; \n" + - " ++y; \n" + - " } while (y < 5); \n" + - " \n" + - " ++x; \n" + - " } while (x < 5); \n" + - " \n" + - " return b; \n"); + " int[][] b = new int[5][5]; \n" + + " int x = 0, y; \n" + + " \n" + + " do { \n" + + " y = 0; \n" + + " \n" + + " do { \n" + + " b[x][y] = x*y; \n" + + " ++y; \n" + + " } while (y < 5); \n" + + " \n" + + " ++x; \n" + + " } while (x < 5); \n" + + " \n" + + " return b; \n" + ); - int[][] b = (int[][])value; + int[][] b = (int[][]) value; for (byte x = 0; x < 5; ++x) { for (byte y = 0; y < 5; ++y) { - assertEquals(x*y, b[x][y]); + assertEquals(x * y, b[x][y]); } } } @@ -149,96 +155,180 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(6, exec("int x, y; for (x = 0; x < 4; ++x) {y += x;} return y;")); assertEquals("aaaaaa", exec("String c = \"a\"; for (int x = 0; x < 5; ++x) c += \"a\"; return c;")); - assertEquals(6, exec("double test() { return 0.0; }" + - "int x, y; for (test(); x < 4; test()) {y += x; ++x;} return y;")); + assertEquals(6, exec("double test() { return 0.0; }" + "int x, y; for (test(); x < 4; test()) {y += x; ++x;} return y;")); Object value = exec( - " int[][] b = new int[5][5]; \n" + - " for (int x = 0; x < 5; ++x) { \n" + - " for (int y = 0; y < 5; ++y) { \n" + - " b[x][y] = x*y; \n" + - " } \n" + - " } \n" + - " \n" + - " return b; \n"); + " int[][] b = new int[5][5]; \n" + + " for (int x = 0; x < 5; ++x) { \n" + + " for (int y = 0; y < 5; ++y) { \n" + + " b[x][y] = x*y; \n" + + " } \n" + + " } \n" + + " \n" + + " return b; \n" + ); - int[][] b = (int[][])value; + int[][] b = (int[][]) value; for (byte x = 0; x < 5; ++x) { for (byte y = 0; y < 5; ++y) { - assertEquals(x*y, b[x][y]); + assertEquals(x * y, b[x][y]); } } } public void testIterableForEachStatement() { - assertEquals(6, exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + - " for (int x : l) total += x; return total")); - assertEquals(6, exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + - " for (x in l) total += x; return total")); - assertEquals("123", exec("List l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + - " for (String x : l) cat += x; return cat")); - assertEquals("123", exec("List l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + - " for (x in l) cat += x; return cat")); - assertEquals("1236", exec("Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + - " String cat = ''; int total = 0;" + - " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); - assertEquals("1236", exec("Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + - " String cat = ''; int total = 0;" + - " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); + assertEquals( + 6, + exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (int x : l) total += x; return total") + ); + assertEquals( + 6, + exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (x in l) total += x; return total") + ); + assertEquals( + "123", + exec( + "List l = 
new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + + " for (String x : l) cat += x; return cat" + ) + ); + assertEquals( + "123", + exec("List l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + " for (x in l) cat += x; return cat") + ); + assertEquals( + "1236", + exec( + "Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total" + ) + ); + assertEquals( + "1236", + exec( + "Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total" + ) + ); } public void testIterableForEachStatementDef() { - assertEquals(6, exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + - " for (int x : l) total += x; return total")); - assertEquals(6, exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + - " for (x in l) total += x; return total")); - assertEquals("123", exec("def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + - " for (String x : l) cat += x; return cat")); - assertEquals("123", exec("def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + - " for (x in l) cat += x; return cat")); - assertEquals("1236", exec("def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + - " String cat = ''; int total = 0;" + - " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); - assertEquals("1236", exec("def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + - " String cat = ''; int total = 0;" + - " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); + assertEquals( + 6, + exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (int x : l) total += x; return total") + ); + assertEquals( + 6, + exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (x in l) total += x; return total") + ); + assertEquals( + "123", + exec( + "def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + " for (String x : l) cat += x; return cat" + ) + ); + assertEquals( + "123", + exec("def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + " for (x in l) cat += x; return cat") + ); + assertEquals( + "1236", + exec( + "def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total" + ) + ); + assertEquals( + "1236", + exec( + "def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total" + ) + ); } public void testArrayForEachStatement() { - assertEquals(6, exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + - " for (int x : a) total += x; return total")); - assertEquals(6, exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + - " for (x in a) total += x; return total")); - assertEquals("123", exec("String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + - " for (String x : a) total += x; return total")); - 
assertEquals("123", exec("String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + - " for (x in a) total += x; return total")); - assertEquals(6, exec("int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + - " for (int[] j : i) total += j[0]; return total")); - assertEquals(6, exec("int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + - " for (j in i) total += j[0]; return total")); + assertEquals( + 6, + exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (int x : a) total += x; return total") + ); + assertEquals( + 6, + exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (x in a) total += x; return total") + ); + assertEquals( + "123", + exec( + "String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + + " for (String x : a) total += x; return total" + ) + ); + assertEquals( + "123", + exec( + "String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + " for (x in a) total += x; return total" + ) + ); + assertEquals( + 6, + exec( + "int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + + " for (int[] j : i) total += j[0]; return total" + ) + ); + assertEquals( + 6, + exec( + "int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + + " for (j in i) total += j[0]; return total" + ) + ); } public void testArrayForEachStatementDef() { - assertEquals(6, exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + - " for (int x : a) total += x; return total")); - assertEquals(6, exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + - " for (x in a) total += x; return total")); - assertEquals("123", exec("def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + - " for (String x : a) total += x; return total")); - assertEquals("123", exec("def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + - " for (x in a) total += x; return total")); - assertEquals(6, exec("def i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + - " for (int[] j : i) total += j[0]; return total")); - assertEquals(6, exec("def i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + - " for (j in i) total += j[0]; return total")); + assertEquals( + 6, + exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (int x : a) total += x; return total") + ); + assertEquals( + 6, + exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (x in a) total += x; return total") + ); + assertEquals( + "123", + exec( + "def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + + " for (String x : a) total += x; return total" + ) + ); + assertEquals( + "123", + exec("def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + " for (x in a) total += x; return total") + ); + assertEquals( + 6, + exec( + "def i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + + " for (int[] j : i) total += j[0]; return total" + ) + ); + assertEquals( + 6, + exec( + "def i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + " for (j in i) total += j[0]; return total" + ) + ); } public void testDeclarationStatement() { - assertEquals((byte)2, exec("byte a = 2; return a;")); - assertEquals((short)2, exec("short a = 2; return a;")); - assertEquals((char)2, 
exec("char a = 2; return a;")); + assertEquals((byte) 2, exec("byte a = 2; return a;")); + assertEquals((short) 2, exec("short a = 2; return a;")); + assertEquals((char) 2, exec("char a = 2; return a;")); assertEquals(2, exec("int a = 2; return a;")); assertEquals(2L, exec("long a = 2; return a;")); assertEquals(2F, exec("float a = 2; return a;")); @@ -283,8 +373,8 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(10, exec("return 10;")); assertEquals(5, exec("int x = 5; return x;")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1];")); - assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]); - assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s;")).get("x")); + assertEquals(5, ((short[]) exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]); + assertEquals(10, ((Map) exec("Map s = new HashMap(); s.put(\"x\", 10); return s;")).get("x")); } public abstract static class OneArg { @@ -294,33 +384,53 @@ public class BasicStatementTests extends ScriptTestCase { public static final ScriptContext CONTEXT = new ScriptContext<>("onearg", OneArg.Factory.class); - public static final String[] PARAMETERS = new String[] {"arg"}; + public static final String[] PARAMETERS = new String[] { "arg" }; + public abstract void execute(List arg); } + public void testVoidReturnStatement() { List expected = Collections.singletonList(1); - assertEquals(expected, exec("void test(List list) {if (list.isEmpty()) {list.add(1); return;} list.add(2);} " + - "List rtn = new ArrayList(); test(rtn); rtn")); - assertEquals(expected, exec("void test(List list) {if (list.isEmpty()) {list.add(1); return} list.add(2);} " + - "List rtn = new ArrayList(); test(rtn); rtn")); + assertEquals( + expected, + exec( + "void test(List list) {if (list.isEmpty()) {list.add(1); return;} list.add(2);} " + + "List rtn = new ArrayList(); test(rtn); rtn" + ) + ); + assertEquals( + expected, + exec( + "void test(List list) {if (list.isEmpty()) {list.add(1); return} list.add(2);} " + + "List rtn = new ArrayList(); test(rtn); rtn" + ) + ); expected = new ArrayList<>(); expected.add(0); expected.add(2); - assertEquals(expected, exec("void test(List list) {if (list.isEmpty()) {list.add(1); return} list.add(2);} " + - "List rtn = new ArrayList(); rtn.add(0); test(rtn); rtn")); + assertEquals( + expected, + exec( + "void test(List list) {if (list.isEmpty()) {list.add(1); return} list.add(2);} " + + "List rtn = new ArrayList(); rtn.add(0); test(rtn); rtn" + ) + ); ArrayList input = new ArrayList<>(); - getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return;} arg.add(2);", - OneArg.CONTEXT, emptyMap()).newInstance().execute(input); + getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return;} arg.add(2);", OneArg.CONTEXT, emptyMap()) + .newInstance() + .execute(input); assertEquals(Collections.singletonList(1), input); input = new ArrayList<>(); - getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return} arg.add(2);", - OneArg.CONTEXT, emptyMap()).newInstance().execute(input); + getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return} arg.add(2);", OneArg.CONTEXT, emptyMap()) + .newInstance() + .execute(input); assertEquals(Collections.singletonList(1), input); input = new ArrayList<>(); input.add(0); - getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return} arg.add(2);", - OneArg.CONTEXT, emptyMap()).newInstance().execute(input); + 
getEngine().compile("testOneArg", "if (arg.isEmpty()) {arg.add(1); return} arg.add(2);", OneArg.CONTEXT, emptyMap()) + .newInstance() + .execute(input); assertEquals(expected, input); } @@ -334,217 +444,253 @@ public class BasicStatementTests extends ScriptTestCase { } public void testArrayLoopWithoutCounter() { - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "for (int i = 0; i < array.length; i++) { sum += array[i] } return sum", - Collections.emptyMap(), - Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - true - )); - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "int i = 0; while (i < array.length) { sum += array[i++] } return sum", - Collections.emptyMap(), - Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - true - )); - assertEquals(6L, exec("long sum = 0; long[] array = new long[] { 1, 2, 3 };" + - "int i = 0; do { sum += array[i++] } while (i < array.length); return sum", - Collections.emptyMap(), - Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), - true - )); + assertEquals( + 6L, + exec( + "long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "for (int i = 0; i < array.length; i++) { sum += array[i] } return sum", + Collections.emptyMap(), + Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), + true + ) + ); + assertEquals( + 6L, + exec( + "long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "int i = 0; while (i < array.length) { sum += array[i++] } return sum", + Collections.emptyMap(), + Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), + true + ) + ); + assertEquals( + 6L, + exec( + "long sum = 0; long[] array = new long[] { 1, 2, 3 };" + + "int i = 0; do { sum += array[i++] } while (i < array.length); return sum", + Collections.emptyMap(), + Collections.singletonMap(CompilerSettings.MAX_LOOP_COUNTER, "0"), + true + ) + ); } // tests both single break and multiple breaks used in a script public void testForWithBreak() { // single break test - assertEquals(1, exec( - "Map settings = ['test1' : '1'];" + - "int i = 0;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (; i < keys.size(); ++i) {" + - " if (settings.containsKey(keys[i])) {" + - " break;" + - " }" + - "}" + - "return i;" - )); + assertEquals( + 1, + exec( + "Map settings = ['test1' : '1'];" + + "int i = 0;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (; i < keys.size(); ++i) {" + + " if (settings.containsKey(keys[i])) {" + + " break;" + + " }" + + "}" + + "return i;" + ) + ); List expected = new ArrayList<>(); expected.add(1); expected.add(0); // multiple breaks test - assertEquals(expected, exec( - "Map outer = ['test1' : '1'];" + - "Map inner = ['test0' : '2'];" + - "boolean found = false;" + - "int i = 0, j = 0;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (; i < keys.size(); ++i) {" + - " if (outer.containsKey(keys[i])) {" + - " for (; j < keys.size(); ++j) {" + - " if (inner.containsKey(keys[j])) {" + - " found = true;" + - " break;" + - " }" + - " }" + - " if (found) {" + - " break;" + - " }" + - " }" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test1' : '1'];" + + "Map inner = ['test0' : '2'];" + + "boolean found = false;" + + "int i = 0, j = 0;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (; i < keys.size(); ++i) {" + + " if (outer.containsKey(keys[i])) {" + + " for (; j < keys.size(); ++j) {" + + " if (inner.containsKey(keys[j])) {" + + " found = true;" + + " break;" + + 
" }" + + " }" + + " if (found) {" + + " break;" + + " }" + + " }" + + "}" + + "[i, j];" + ) + ); expected.set(1, 3); // multiple breaks test, ignore inner break - assertEquals(expected, exec( - "Map outer = ['test1' : '1'];" + - "Map inner = ['test3' : '2'];" + - "int i = 0, j = 0;" + - "boolean found = false;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (; i < keys.size(); ++i) {" + - " if (outer.containsKey(keys[i])) {" + - " for (; j < keys.size(); ++j) {" + - " if (found) {" + - " break;" + - " }" + - " }" + - " found = true;" + - " if (found) {" + - " break;" + - " }" + - " }" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test1' : '1'];" + + "Map inner = ['test3' : '2'];" + + "int i = 0, j = 0;" + + "boolean found = false;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (; i < keys.size(); ++i) {" + + " if (outer.containsKey(keys[i])) {" + + " for (; j < keys.size(); ++j) {" + + " if (found) {" + + " break;" + + " }" + + " }" + + " found = true;" + + " if (found) {" + + " break;" + + " }" + + " }" + + "}" + + "[i, j];" + ) + ); expected.set(0, 3); expected.set(1, 1); // multiple breaks test, ignore outer break - assertEquals(expected, exec( - "Map outer = ['test3' : '1'];" + - "Map inner = ['test1' : '2'];" + - "int i = 0, j = 0;" + - "boolean found = false;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (; i < keys.size(); ++i) {" + - " if (outer.containsKey('test3')) {" + - " for (; j < keys.size(); ++j) {" + - " if (inner.containsKey(keys[j])) {" + - " break;" + - " }" + - " }" + - " if (found) {" + - " break;" + - " }" + - " }" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test3' : '1'];" + + "Map inner = ['test1' : '2'];" + + "int i = 0, j = 0;" + + "boolean found = false;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (; i < keys.size(); ++i) {" + + " if (outer.containsKey('test3')) {" + + " for (; j < keys.size(); ++j) {" + + " if (inner.containsKey(keys[j])) {" + + " break;" + + " }" + + " }" + + " if (found) {" + + " break;" + + " }" + + " }" + + "}" + + "[i, j];" + ) + ); } // tests both single break and multiple breaks used in a script public void testForEachWithBreak() { // single break test - assertEquals(1, exec( - "Map settings = ['test1' : '1'];" + - "int i = 0;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (String key : keys) {" + - " if (settings.containsKey(key)) {" + - " break;" + - " }" + - " ++i;" + - "}" + - "return i;" - )); + assertEquals( + 1, + exec( + "Map settings = ['test1' : '1'];" + + "int i = 0;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (String key : keys) {" + + " if (settings.containsKey(key)) {" + + " break;" + + " }" + + " ++i;" + + "}" + + "return i;" + ) + ); List expected = new ArrayList<>(); expected.add(1); expected.add(0); // multiple breaks test - assertEquals(expected, exec( - "Map outer = ['test1' : '1'];" + - "Map inner = ['test0' : '2'];" + - "int i = 0, j = 0;" + - "boolean found = false;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (String okey : keys) {" + - " if (outer.containsKey(okey)) {" + - " for (String ikey : keys) {" + - " if (inner.containsKey(ikey)) {" + - " found = true;" + - " break;" + - " }" + - " ++j;" + - " }" + - " if (found) {" + - " break;" + - " }" + - " }" + - " ++i;" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test1' : '1'];" + + "Map inner = ['test0' : '2'];" + + "int i = 0, j = 0;" + + "boolean found = false;" + + "List keys = 
['test0', 'test1', 'test2'];" + + "for (String okey : keys) {" + + " if (outer.containsKey(okey)) {" + + " for (String ikey : keys) {" + + " if (inner.containsKey(ikey)) {" + + " found = true;" + + " break;" + + " }" + + " ++j;" + + " }" + + " if (found) {" + + " break;" + + " }" + + " }" + + " ++i;" + + "}" + + "[i, j];" + ) + ); expected.set(0, 3); expected.set(1, 1); // multiple breaks test, ignore outer break - assertEquals(expected, exec( - "Map outer = ['test1' : '1'];" + - "Map inner = ['test1' : '1'];" + - "int i = 0, j = 0;" + - "boolean found = false;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (String okey : keys) {" + - " if (outer.containsKey(okey)) {" + - " for (String ikey : keys) {" + - " if (inner.containsKey(ikey)) {" + - " break;" + - " }" + - " ++j;" + - " }" + - " if (found) {" + - " break;" + - " }" + - " }" + - " ++i;" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test1' : '1'];" + + "Map inner = ['test1' : '1'];" + + "int i = 0, j = 0;" + + "boolean found = false;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (String okey : keys) {" + + " if (outer.containsKey(okey)) {" + + " for (String ikey : keys) {" + + " if (inner.containsKey(ikey)) {" + + " break;" + + " }" + + " ++j;" + + " }" + + " if (found) {" + + " break;" + + " }" + + " }" + + " ++i;" + + "}" + + "[i, j];" + ) + ); expected.set(0, 1); expected.set(1, 3); // multiple breaks test, ignore inner break - assertEquals(expected, exec( - "Map outer = ['test1' : '1'];" + - "Map inner = ['test1' : '1'];" + - "int i = 0, j = 0;" + - "boolean found = false;" + - "List keys = ['test0', 'test1', 'test2'];" + - "for (String okey : keys) {" + - " if (outer.containsKey(okey)) {" + - " for (String ikey : keys) {" + - " if (found) {" + - " break;" + - " }" + - " ++j;" + - " }" + - " found = true;" + - " if (found) {" + - " break;" + - " }" + - " }" + - " ++i;" + - "}" + - "[i, j];" - )); + assertEquals( + expected, + exec( + "Map outer = ['test1' : '1'];" + + "Map inner = ['test1' : '1'];" + + "int i = 0, j = 0;" + + "boolean found = false;" + + "List keys = ['test0', 'test1', 'test2'];" + + "for (String okey : keys) {" + + " if (outer.containsKey(okey)) {" + + " for (String ikey : keys) {" + + " if (found) {" + + " break;" + + " }" + + " ++j;" + + " }" + + " found = true;" + + " if (found) {" + + " break;" + + " }" + + " }" + + " ++i;" + + "}" + + "[i, j];" + ) + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java index d770a18b1e9..65f277741cc 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BindingsTests.java @@ -55,15 +55,32 @@ public class BindingsTests extends ScriptTestCase { whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.opensearch.painless.test")); InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); - WhitelistInstanceBinding getter = new WhitelistInstanceBinding("test", instanceBindingTestClass, - "setInstanceBindingValue", "void", Collections.singletonList("int"), Collections.emptyList()); - WhitelistInstanceBinding setter = new WhitelistInstanceBinding("test", instanceBindingTestClass, - "getInstanceBindingValue", "int", Collections.emptyList(), Collections.emptyList()); + WhitelistInstanceBinding getter = new WhitelistInstanceBinding( + "test", + 
instanceBindingTestClass, + "setInstanceBindingValue", + "void", + Collections.singletonList("int"), + Collections.emptyList() + ); + WhitelistInstanceBinding setter = new WhitelistInstanceBinding( + "test", + instanceBindingTestClass, + "getInstanceBindingValue", + "int", + Collections.emptyList(), + Collections.emptyList() + ); List instanceBindingsList = new ArrayList<>(); instanceBindingsList.add(getter); instanceBindingsList.add(setter); - Whitelist instanceBindingsWhitelist = new Whitelist(instanceBindingTestClass.getClass().getClassLoader(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), instanceBindingsList); + Whitelist instanceBindingsWhitelist = new Whitelist( + instanceBindingTestClass.getClass().getClassLoader(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + instanceBindingsList + ); whitelists.add(instanceBindingsWhitelist); contexts.put(BindingsTestScript.CONTEXT, whitelists); @@ -88,7 +105,7 @@ public class BindingsTests extends ScriptTestCase { } public int addWithState(int istateless, double dstateless) { - return istateless + state + (int)dstateless; + return istateless + state + (int) dstateless; } } @@ -102,7 +119,7 @@ public class BindingsTests extends ScriptTestCase { } public int addThisWithState(int istateless, double dstateless) { - return istateless + state + (int)dstateless + bindingsTestScript.getTestValue(); + return istateless + state + (int) dstateless + bindingsTestScript.getTestValue(); } } @@ -136,11 +153,17 @@ public class BindingsTests extends ScriptTestCase { public abstract static class BindingsTestScript { public static final String[] PARAMETERS = { "test", "bound" }; - public int getTestValue() {return 7;} + + public int getTestValue() { + return 7; + } + public abstract int execute(int test, int bound); + public interface Factory { BindingsTestScript newInstance(); } + public static final ScriptContext CONTEXT = new ScriptContext<>("bindings_test", Factory.class); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/BoxedCastTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/BoxedCastTests.java index 08bf7d3399d..cda5a5d23a1 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/BoxedCastTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/BoxedCastTests.java @@ -37,8 +37,7 @@ public class BoxedCastTests extends ScriptTestCase { public void testMethodCallByteToBoxedCasts() { assertEquals(0, exec("byte u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); @@ -46,8 +45,10 @@ public class BoxedCastTests extends ScriptTestCase { assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, 
- () -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); @@ -55,8 +56,7 @@ public class BoxedCastTests extends ScriptTestCase { assertEquals(0, exec("byte u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("byte u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); @@ -64,8 +64,10 @@ public class BoxedCastTests extends ScriptTestCase { assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Float.valueOf((float)1); b.compareTo(u);")); @@ -73,8 +75,10 @@ public class BoxedCastTests extends ScriptTestCase { assertEquals(0, exec("def u = (byte)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("def u = (byte)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); @@ -82,8 +86,7 @@ public class BoxedCastTests extends ScriptTestCase { assertEquals(0, exec("def u = (byte)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (byte)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (byte)1; def b = 
Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (byte)1; def b = Float.valueOf((float)1); b.compareTo(u);")); @@ -91,61 +94,64 @@ public class BoxedCastTests extends ScriptTestCase { } public void testMethodCallShortToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); assertEquals(0, exec("Short u = Short.valueOf((short)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Short u = Short.valueOf((short)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("short u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - 
expectScriptThrows(ClassCastException.class, - () -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("def u = (short)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (short)1; def b = Float.valueOf((float)1); b.compareTo(u);")); @@ -153,60 +159,60 @@ public class BoxedCastTests extends ScriptTestCase { } public void testMethodCallCharacterToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("char u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("char u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("char u = 1; Byte b = 
Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("char u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);") + ); assertEquals(0, exec("Character u = Character.valueOf((char)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("char u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);") + ); assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Character.valueOf((char)1); b.compareTo(u);")); 
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (char)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (char)1; def b = Short.valueOf((short)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (char)1; def b = Long.valueOf((long)1); b.compareTo(u);")); @@ -215,67 +221,70 @@ public class BoxedCastTests extends ScriptTestCase { } public void testMethodCallIntegerToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, 
- () -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("int u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = 
Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (int)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); assertEquals(0, exec("def u = (int)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (int)1; def b = Float.valueOf((float)1); b.compareTo(u);")); @@ -283,247 +292,256 @@ public class BoxedCastTests extends ScriptTestCase { } public void testMethodCallLongToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + 
expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);") + ); assertEquals(0, exec("Long u = Long.valueOf((long)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Long u = Long.valueOf((long)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Long u = Long.valueOf((long)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("long u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("long u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("long u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> 
exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);") + ); assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (long)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (long)1; def b = Integer.valueOf((int)1); 
b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (long)1; def b = Double.valueOf((double)1); b.compareTo(u);")); } public void testMethodCallFloatToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("float u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("float u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);") + ); assertEquals(0, exec("Float u = Float.valueOf((float)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Float u = Float.valueOf((float)1); Double b = 
Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("float u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("float u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Float u = Float.valueOf((float)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Long.valueOf((long)1); b.compareTo(u);") + ); assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u 
= (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (float)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (float)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);")); assertEquals(0, exec("def u = (float)1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (float)1; def b = Double.valueOf((double)1); b.compareTo(u);")); } public void testMethodCallDoubleToBoxedCasts() { - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + 
expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("double u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);") + ); assertEquals(0, exec("Double u = Double.valueOf((double)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); - 
expectScriptThrows(ClassCastException.class, - () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("double u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);") + ); + expectScriptThrows( + ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);") + ); assertEquals(0, exec("Double u = Double.valueOf((double)1); def b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> 
exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);") + ); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (double)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);")); - expectScriptThrows(ClassCastException.class, - () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);")); assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);")); } public void testReturnToByteBoxedCasts() { - assertEquals((byte)1, exec("Byte rtn() {return (byte)1} rtn()")); + assertEquals((byte) 1, exec("Byte rtn() {return (byte)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (short)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (char)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (int)1} rtn()")); @@ -531,7 +549,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (float)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (double)1} rtn()")); - assertEquals((byte)1, 
exec("Byte rtn() {return Byte.valueOf((byte)1)} rtn()")); + assertEquals((byte) 1, exec("Byte rtn() {return Byte.valueOf((byte)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Short.valueOf((short)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Character.valueOf((char)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Integer.valueOf((int)1)} rtn()")); @@ -539,7 +557,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Float.valueOf((float)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Double.valueOf((double)1)} rtn()")); - assertEquals((byte)1, exec("Byte rtn() {def d = (byte)1; return d} rtn()")); + assertEquals((byte) 1, exec("Byte rtn() {def d = (byte)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (short)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (char)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (int)1; return d} rtn()")); @@ -547,7 +565,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (float)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (double)1; return d} rtn()")); - assertEquals((byte)1, exec("Byte rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); + assertEquals((byte) 1, exec("Byte rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Short.valueOf((short)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Character.valueOf((char)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); @@ -557,32 +575,32 @@ public class BoxedCastTests extends ScriptTestCase { } public void testReturnToShortBoxedCasts() { - assertEquals((short)1, exec("Short rtn() {return (byte)1} rtn()")); - assertEquals((short)1, exec("Short rtn() {return (short)1} rtn()")); + assertEquals((short) 1, exec("Short rtn() {return (byte)1} rtn()")); + assertEquals((short) 1, exec("Short rtn() {return (short)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (char)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (int)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (long)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (float)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (double)1} rtn()")); - assertEquals((short)1, exec("Short rtn() {return Byte.valueOf((byte)1)} rtn()")); - assertEquals((short)1, exec("Short rtn() {return Short.valueOf((short)1)} rtn()")); + assertEquals((short) 1, exec("Short rtn() {return Byte.valueOf((byte)1)} rtn()")); + assertEquals((short) 1, exec("Short rtn() {return Short.valueOf((short)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Character.valueOf((char)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Integer.valueOf((int)1)} rtn()")); 
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Long.valueOf((long)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Float.valueOf((float)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Double.valueOf((double)1)} rtn()")); - assertEquals((short)1, exec("Short rtn() {def d = (byte)1; return d} rtn()")); - assertEquals((short)1, exec("Short rtn() {def d = (short)1; return d} rtn()")); + assertEquals((short) 1, exec("Short rtn() {def d = (byte)1; return d} rtn()")); + assertEquals((short) 1, exec("Short rtn() {def d = (short)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (char)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (int)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (long)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (float)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (double)1; return d} rtn()")); - assertEquals((short)1, exec("Short rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); - assertEquals((short)1, exec("Short rtn() {def d = Short.valueOf((short)1); return d} rtn()")); + assertEquals((short) 1, exec("Short rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); + assertEquals((short) 1, exec("Short rtn() {def d = Short.valueOf((short)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Character.valueOf((char)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Long.valueOf((long)1); return d} rtn()")); @@ -593,7 +611,7 @@ public class BoxedCastTests extends ScriptTestCase { public void testReturnToCharacterBoxedCasts() { expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (byte)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (short)1} rtn()")); - assertEquals((char)1, exec("Character rtn() {return (char)1} rtn()")); + assertEquals((char) 1, exec("Character rtn() {return (char)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (int)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (long)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (float)1} rtn()")); @@ -601,7 +619,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Byte.valueOf((byte)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Short.valueOf((short)1)} rtn()")); - assertEquals((char)1, exec("Character rtn() {return Character.valueOf((char)1)} rtn()")); + assertEquals((char) 1, exec("Character rtn() {return Character.valueOf((char)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Integer.valueOf((int)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Long.valueOf((long)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Float.valueOf((float)1)} 
rtn()")); @@ -609,7 +627,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (byte)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (short)1; return d} rtn()")); - assertEquals((char)1, exec("Character rtn() {def d = (char)1; return d} rtn()")); + assertEquals((char) 1, exec("Character rtn() {def d = (char)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (int)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (long)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (float)1; return d} rtn()")); @@ -617,7 +635,7 @@ public class BoxedCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Short.valueOf((short)1); return d} rtn()")); - assertEquals((char)1, exec("Character rtn() {def d = Character.valueOf((char)1); return d} rtn()")); + assertEquals((char) 1, exec("Character rtn() {def d = Character.valueOf((char)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Long.valueOf((long)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Float.valueOf((float)1); return d} rtn()")); @@ -659,104 +677,104 @@ public class BoxedCastTests extends ScriptTestCase { } public void testReturnToLongBoxedCasts() { - assertEquals((long)1, exec("Long rtn() {return (byte)1} rtn()")); - assertEquals((long)1, exec("Long rtn() {return (short)1} rtn()")); - assertEquals((long)1, exec("Long rtn() {return (char)1} rtn()")); - assertEquals((long)1, exec("Long rtn() {return (int)1} rtn()")); - assertEquals((long)1, exec("Long rtn() {return (long)1} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return (byte)1} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return (short)1} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return (char)1} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return (int)1} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return (long)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return (float)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return (double)1} rtn()")); - assertEquals((long)1, exec("Long rtn() {return Byte.valueOf((byte)1)} rtn()")); - assertEquals((long)1, exec("Long rtn() {return Short.valueOf((short)1)} rtn()")); - assertEquals((long)1, exec("Long rtn() {return Character.valueOf((char)1)} rtn()")); - assertEquals((long)1, exec("Long rtn() {return Integer.valueOf((int)1)} rtn()")); - assertEquals((long)1, exec("Long rtn() {return Long.valueOf((long)1)} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return Byte.valueOf((byte)1)} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return Short.valueOf((short)1)} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return Character.valueOf((char)1)} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return Integer.valueOf((int)1)} rtn()")); + assertEquals((long) 1, exec("Long rtn() {return Long.valueOf((long)1)} 
rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return Float.valueOf((float)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return Double.valueOf((double)1)} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = (byte)1; return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = (short)1; return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = (char)1; return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = (int)1; return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = (long)1; return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = (byte)1; return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = (short)1; return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = (char)1; return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = (int)1; return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = (long)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = (float)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = (double)1; return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = Short.valueOf((short)1); return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = Character.valueOf((char)1); return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); - assertEquals((long)1, exec("Long rtn() {def d = Long.valueOf((long)1); return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = Short.valueOf((short)1); return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = Character.valueOf((char)1); return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); + assertEquals((long) 1, exec("Long rtn() {def d = Long.valueOf((long)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = Float.valueOf((float)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = Double.valueOf((double)1); return d} rtn()")); } public void testReturnToFloatBoxedCasts() { - assertEquals((float)1, exec("Float rtn() {return (byte)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return (short)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return (char)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return (int)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return (long)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return (float)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (byte)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (short)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (char)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (int)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (long)1} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return (float)1} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {return (double)1} rtn()")); - assertEquals((float)1, exec("Float rtn() {return Byte.valueOf((byte)1)} rtn()")); - assertEquals((float)1, 
exec("Float rtn() {return Short.valueOf((short)1)} rtn()")); - assertEquals((float)1, exec("Float rtn() {return Character.valueOf((char)1)} rtn()")); - assertEquals((float)1, exec("Float rtn() {return Integer.valueOf((int)1)} rtn()")); - assertEquals((float)1, exec("Float rtn() {return Long.valueOf((long)1)} rtn()")); - assertEquals((float)1, exec("Float rtn() {return Float.valueOf((float)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Byte.valueOf((byte)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Short.valueOf((short)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Character.valueOf((char)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Integer.valueOf((int)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Long.valueOf((long)1)} rtn()")); + assertEquals((float) 1, exec("Float rtn() {return Float.valueOf((float)1)} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {return Double.valueOf((double)1)} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (byte)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (short)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (char)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (int)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (long)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = (float)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (byte)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (short)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (char)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (int)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (long)1; return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = (float)1; return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {def d = (double)1; return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Short.valueOf((short)1); return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Character.valueOf((char)1); return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Long.valueOf((long)1); return d} rtn()")); - assertEquals((float)1, exec("Float rtn() {def d = Float.valueOf((float)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Short.valueOf((short)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Character.valueOf((char)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Long.valueOf((long)1); return d} rtn()")); + assertEquals((float) 1, exec("Float rtn() {def d = Float.valueOf((float)1); return d} rtn()")); expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {def d = Double.valueOf((double)1); return d} rtn()")); } public void testReturnToDoubleBoxedCasts() { - assertEquals((double)1, exec("Double rtn() {return (byte)1} rtn()")); - 
assertEquals((double)1, exec("Double rtn() {return (short)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return (char)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return (int)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return (long)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return (float)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return (double)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (byte)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (short)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (char)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (int)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (long)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (float)1} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return (double)1} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Byte.valueOf((byte)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Short.valueOf((short)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Character.valueOf((char)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Integer.valueOf((int)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Long.valueOf((long)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Float.valueOf((float)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {return Double.valueOf((double)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Byte.valueOf((byte)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Short.valueOf((short)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Character.valueOf((char)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Integer.valueOf((int)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Long.valueOf((long)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Float.valueOf((float)1)} rtn()")); + assertEquals((double) 1, exec("Double rtn() {return Double.valueOf((double)1)} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (byte)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (short)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (char)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (int)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (long)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (float)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = (double)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (byte)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (short)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (char)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (int)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (long)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (float)1; return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = (double)1; return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Short.valueOf((short)1); return d} 
rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Character.valueOf((char)1); return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Long.valueOf((long)1); return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Float.valueOf((float)1); return d} rtn()")); - assertEquals((double)1, exec("Double rtn() {def d = Double.valueOf((double)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Byte.valueOf((byte)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Short.valueOf((short)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Character.valueOf((char)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Integer.valueOf((int)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Long.valueOf((long)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Float.valueOf((float)1); return d} rtn()")); + assertEquals((double) 1, exec("Double rtn() {def d = Double.valueOf((double)1); return d} rtn()")); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ComparisonTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ComparisonTests.java index 9c6bec2b423..87705a5c3b4 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ComparisonTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ComparisonTests.java @@ -160,10 +160,10 @@ public class ComparisonTests extends ScriptTestCase { assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x != y")); assertEquals(false, exec("def x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x != y")); - assertEquals(false, exec("def x = true; def y = true; return x != y")); - assertEquals(true, exec("def x = true; def y = false; return x != y")); - assertEquals(true, exec("def x = false; def y = true; return x != y")); - assertEquals(false, exec("def x = false; def y = false; return x != y")); + assertEquals(false, exec("def x = true; def y = true; return x != y")); + assertEquals(true, exec("def x = true; def y = false; return x != y")); + assertEquals(true, exec("def x = false; def y = true; return x != y")); + assertEquals(false, exec("def x = false; def y = false; return x != y")); } public void testDefNeTypedLHS() { @@ -188,10 +188,10 @@ public class ComparisonTests extends ScriptTestCase { assertEquals(false, exec("Map x = new HashMap(); x.put(3, 3); def y = new HashMap(); y.put(3, 3); return x != y")); assertEquals(false, exec("Map x = new HashMap(); def y = x; x.put(3, 3); y.put(3, 3); return x != y")); - assertEquals(false, exec("boolean x = true; def y = true; return x != y")); - assertEquals(true, exec("boolean x = true; def y = false; return x != y")); - assertEquals(true, exec("boolean x = false; def y = true; return x != y")); - assertEquals(false, exec("boolean x = false; def y = false; return x != y")); + assertEquals(false, exec("boolean x = true; def y = true; return x != y")); + assertEquals(true, exec("boolean x = true; def y = false; return x != y")); + assertEquals(true, exec("boolean x = false; def y = true; return x != y")); + assertEquals(false, exec("boolean x = false; def y = false; return x != y")); } public void testDefNeTypedRHS() { @@ -216,10 +216,10 @@ public class ComparisonTests extends 
ScriptTestCase { assertEquals(false, exec("def x = new HashMap(); x.put(3, 3); Map y = new HashMap(); y.put(3, 3); return x != y")); assertEquals(false, exec("def x = new HashMap(); Map y = x; x.put(3, 3); y.put(3, 3); return x != y")); - assertEquals(false, exec("def x = true; boolean y = true; return x != y")); - assertEquals(true, exec("def x = true; boolean y = false; return x != y")); - assertEquals(true, exec("def x = false; boolean y = true; return x != y")); - assertEquals(false, exec("def x = false; boolean y = false; return x != y")); + assertEquals(false, exec("def x = true; boolean y = true; return x != y")); + assertEquals(true, exec("def x = true; boolean y = false; return x != y")); + assertEquals(true, exec("def x = false; boolean y = true; return x != y")); + assertEquals(false, exec("def x = false; boolean y = false; return x != y")); } public void testDefNer() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ConditionalTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ConditionalTests.java index fcaee44e5ce..1296f4dd256 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ConditionalTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ConditionalTests.java @@ -69,50 +69,54 @@ public class ConditionalTests extends ScriptTestCase { public void testAssignment() { assertEquals(4D, exec("boolean x = false; double z = x ? 2 : 4.0F; return z;")); - assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = x ? (byte)y : 7; return z;")); - assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = (byte)(x ? y : 7); return z;")); + assertEquals((byte) 7, exec("boolean x = false; int y = 2; byte z = x ? (byte)y : 7; return z;")); + assertEquals((byte) 7, exec("boolean x = false; int y = 2; byte z = (byte)(x ? y : 7); return z;")); assertEquals(ArrayList.class, exec("boolean x = false; Object z = x ? new HashMap() : new ArrayList(); return z;").getClass()); } public void testNullArguments() { assertEquals(null, exec("boolean b = false, c = true; Object x; Map y; return b && c ? x : y;")); - assertEquals(HashMap.class, - exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass()); + assertEquals( + HashMap.class, + exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass() + ); } public void testPromotion() { assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);")); - assertEquals(false, exec("boolean x = false; boolean y = true; " + - "return (x ? new HashMap() : new ArrayList()) == (y ? new HashMap() : new ArrayList());")); + assertEquals( + false, + exec( + "boolean x = false; boolean y = true; " + + "return (x ? new HashMap() : new ArrayList()) == (y ? new HashMap() : new ArrayList());" + ) + ); } public void testIncompatibleAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = false; byte z = x ? 2 : 4.0F; return z;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = false; byte z = x ? 2 : 4.0F; return z;"); }); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = false; Map z = x ? 4 : (byte)7; return z;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = false; Map z = x ? 4 : (byte)7; return z;"); }); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = false; Map z = x ? 
new HashMap() : new ArrayList(); return z;"); - }); + expectScriptThrows( + ClassCastException.class, + () -> { exec("boolean x = false; Map z = x ? new HashMap() : new ArrayList(); return z;"); } + ); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = false; int y = 2; byte z = x ? y : 7; return z;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = false; int y = 2; byte z = x ? y : 7; return z;"); }); } public void testNested() { for (int i = 0; i < 100; i++) { String scriptPart = IntStream.range(0, i).mapToObj(j -> "field == '" + j + "' ? '" + j + "' :").collect(joining("\n")); - assertEquals("z", exec("def field = params.a;\n" + - "\n" + - "return (\n" + - scriptPart + - "field == '' ? 'unknown' :\n" + - "field);", Collections.singletonMap("a", "z"), true)); + assertEquals( + "z", + exec( + "def field = params.a;\n" + "\n" + "return (\n" + scriptPart + "field == '' ? 'unknown' :\n" + "field);", + Collections.singletonMap("a", "z"), + true + ) + ); } } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java index b23e67d97b2..740a49e6a9a 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java @@ -6,8 +6,6 @@ * compatible open source license. */ - - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -271,32 +269,34 @@ public class ContextExampleTests extends ScriptTestCase { */ public void testIngestProcessorScript() { - assertEquals(1535785200000L, - exec("def x = ['date': '2018-9-1', 'time': '3:00 PM'];" + - "String[] dateSplit = x.date.splitOnToken('-');" + - "String year = dateSplit[0].trim();" + - "String month = dateSplit[1].trim();" + - "if (month.length() == 1) {" + - " month = '0' + month;" + - "}" + - "String day = dateSplit[2].trim();" + - "if (day.length() == 1) {" + - " day = '0' + day;" + - "}" + - "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" + - "String[] timeSplit = x.time.substring(0, x.time.length() - 2).splitOnToken(':');" + - "int hours = Integer.parseInt(timeSplit[0].trim());" + - "int minutes = Integer.parseInt(timeSplit[1].trim());" + - "if (pm) {" + - " hours += 12;" + - "}" + - "String dts = year + '-' + month + '-' + day + 'T' +" + - " (hours < 10 ? '0' + hours : '' + hours) + ':' +" + - " (minutes < 10 ? 
'0' + minutes : '' + minutes) +" + - " ':00+08:00';" + - "ZonedDateTime dt = ZonedDateTime.parse(" + - " dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" + - "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L" + assertEquals( + 1535785200000L, + exec( + "def x = ['date': '2018-9-1', 'time': '3:00 PM'];" + + "String[] dateSplit = x.date.splitOnToken('-');" + + "String year = dateSplit[0].trim();" + + "String month = dateSplit[1].trim();" + + "if (month.length() == 1) {" + + " month = '0' + month;" + + "}" + + "String day = dateSplit[2].trim();" + + "if (day.length() == 1) {" + + " day = '0' + day;" + + "}" + + "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" + + "String[] timeSplit = x.time.substring(0, x.time.length() - 2).splitOnToken(':');" + + "int hours = Integer.parseInt(timeSplit[0].trim());" + + "int minutes = Integer.parseInt(timeSplit[1].trim());" + + "if (pm) {" + + " hours += 12;" + + "}" + + "String dts = year + '-' + month + '-' + day + 'T' +" + + " (hours < 10 ? '0' + hours : '' + hours) + ':' +" + + " (minutes < 10 ? '0' + minutes : '' + minutes) +" + + " ':00+08:00';" + + "ZonedDateTime dt = ZonedDateTime.parse(" + + " dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" + + "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L" ) ); } @@ -309,7 +309,6 @@ public class ContextExampleTests extends ScriptTestCase { */ - // Use script_fields API to add two extra fields to the hits /* @@ -333,7 +332,6 @@ public class ContextExampleTests extends ScriptTestCase { } */ - // Testing only params, as I am not sure how to test Script Doc Values in painless public void testScriptFieldsScript() { Map hit = new HashMap<>(); @@ -342,16 +340,20 @@ public class ContextExampleTests extends ScriptTestCase { hit.put("fields", fields); Map source = new HashMap<>(); - String[] actors = {"James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns"}; + String[] actors = { "James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns" }; source.put("actors", actors); - assertEquals(hit, exec( - "Map fields = new HashMap();" + - "fields[\"number-of-actors\"] = params['_source']['actors'].length;" + - "Map rtn = new HashMap();" + - "rtn[\"fields\"] = fields;" + - "return rtn;", - singletonMap("_source", source), true) + assertEquals( + hit, + exec( + "Map fields = new HashMap();" + + "fields[\"number-of-actors\"] = params['_source']['actors'].length;" + + "Map rtn = new HashMap();" + + "rtn[\"fields\"] = fields;" + + "return rtn;", + singletonMap("_source", source), + true + ) ); } @@ -385,13 +387,10 @@ public class ContextExampleTests extends ScriptTestCase { params.put("_source", source); params.put("cost", 18); - boolean result = (boolean) exec( - " params['_source']['sold'] == false && params['_source']['cost'] < params.cost;", - params, true); + boolean result = (boolean) exec(" params['_source']['sold'] == false && params['_source']['cost'] < params.cost;", params, true); assertTrue(result); } - // Use script_fields API to add two extra fields to the hits /* curl -X GET localhost:9200/seats/_search @@ -420,4 +419,3 @@ public class ContextExampleTests extends ScriptTestCase { assertEquals(2, result, 0); } } - diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DateTimeTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DateTimeTests.java index e8b085e7720..9a04240bcb0 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DateTimeTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DateTimeTests.java @@ 
-38,170 +38,215 @@ import java.time.ZonedDateTime; public class DateTimeTests extends ScriptTestCase { public void testLongToZonedDateTime() { - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "long milliSinceEpoch = 434931330000L;" + - "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + - "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec( + "long milliSinceEpoch = 434931330000L;" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" + ) + ); } public void testStringToZonedDateTime() { - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "String milliSinceEpochString = '434931330000';" + - "long milliSinceEpoch = Long.parseLong(milliSinceEpochString);" + - "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + - "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec( + "String milliSinceEpochString = '434931330000';" + + "long milliSinceEpoch = Long.parseLong(milliSinceEpochString);" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));" + ) + ); - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "String datetime = '1983-10-13T22:15:30Z';" + - "return ZonedDateTime.parse(datetime);" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec("String datetime = '1983-10-13T22:15:30Z';" + "return ZonedDateTime.parse(datetime);") + ); - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT';" + - "return ZonedDateTime.parse(datetime, DateTimeFormatter.RFC_1123_DATE_TIME);" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec( + "String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT';" + + "return ZonedDateTime.parse(datetime, DateTimeFormatter.RFC_1123_DATE_TIME);" + ) + ); - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z';" + - "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + - "\"'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV\");" + - "return ZonedDateTime.parse(datetime, dtf);" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec( + "String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z';" + + "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + + "\"'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV\");" + + "return ZonedDateTime.parse(datetime, dtf);" + ) + ); } public void testPiecesToZonedDateTime() { - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "int year = 1983;" + - "int month = 10;" + - "int day = 13;" + - "int hour = 22;" + - "int minutes = 15;" + - "int seconds = 30;" + - "int nanos = 0;" + - "String tz = 'Z';" + - "return ZonedDateTime.of(year, month, day, hour, minutes, seconds, nanos, ZoneId.of(tz));" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec( + "int year = 1983;" + + "int month = 10;" + + "int day = 13;" + + "int hour = 22;" + + "int minutes = 15;" + + "int seconds = 30;" + + "int nanos = 0;" + + "String tz = 'Z';" + + "return ZonedDateTime.of(year, 
month, day, hour, minutes, seconds, nanos, ZoneId.of(tz));" + ) + ); } public void testZonedDatetimeToLong() { - assertEquals(434931330000L, exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return zdt.toInstant().toEpochMilli();" - )); + assertEquals( + 434931330000L, + exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.toInstant().toEpochMilli();" + ) + ); } public void testZonedDateTimeToString() { - assertEquals("1983-10-13T22:15:30Z", exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return zdt.format(DateTimeFormatter.ISO_INSTANT);" - )); + assertEquals( + "1983-10-13T22:15:30Z", + exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return zdt.format(DateTimeFormatter.ISO_INSTANT);" + ) + ); - assertEquals("date: 1983/10/13 time: 22:15:30", exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + - "\"'date:' yyyy/MM/dd 'time:' HH:mm:ss\");" + - "return zdt.format(dtf);" - )); + assertEquals( + "date: 1983/10/13 time: 22:15:30", + exec( + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "DateTimeFormatter dtf = DateTimeFormatter.ofPattern(" + + "\"'date:' yyyy/MM/dd 'time:' HH:mm:ss\");" + + "return zdt.format(dtf);" + ) + ); } public void testZonedDateTimeToPieces() { - assertArrayEquals(new int[] {1983, 10, 13, 22, 15, 30, 100}, (int[])exec( - "int[] pieces = new int[7];" + - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of('Z'));" + - "pieces[0] = zdt.year;" + - "pieces[1] = zdt.monthValue;" + - "pieces[2] = zdt.dayOfMonth;" + - "pieces[3] = zdt.hour;" + - "pieces[4] = zdt.minute;" + - "pieces[5] = zdt.second;" + - "pieces[6] = zdt.nano;" + - "return pieces;" - )); + assertArrayEquals( + new int[] { 1983, 10, 13, 22, 15, 30, 100 }, + (int[]) exec( + "int[] pieces = new int[7];" + + "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of('Z'));" + + "pieces[0] = zdt.year;" + + "pieces[1] = zdt.monthValue;" + + "pieces[2] = zdt.dayOfMonth;" + + "pieces[3] = zdt.hour;" + + "pieces[4] = zdt.minute;" + + "pieces[5] = zdt.second;" + + "pieces[6] = zdt.nano;" + + "return pieces;" + ) + ); } public void testLongManipulation() { - assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 27, 0, ZoneId.of("Z")), exec( - "long milliSinceEpoch = 434931330000L;" + - "milliSinceEpoch = milliSinceEpoch - 1000L*3L;" + - "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + - "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'))" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 22, 15, 27, 0, ZoneId.of("Z")), + exec( + "long milliSinceEpoch = 434931330000L;" + + "milliSinceEpoch = milliSinceEpoch - 1000L*3L;" + + "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);" + + "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'))" + ) + ); } public void testZonedDateTimeManipulation() { - assertEquals(ZonedDateTime.of(1983, 10, 16, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return zdt.plusDays(3);" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 16, 22, 15, 30, 0, ZoneId.of("Z")), + exec("ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + "return zdt.plusDays(3);") + ); - 
assertEquals(ZonedDateTime.of(1983, 10, 13, 20, 10, 30, 0, ZoneId.of("Z")), exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return zdt.minusMinutes(125);" - )); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 20, 10, 30, 0, ZoneId.of("Z")), + exec("ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + "return zdt.minusMinutes(125);") + ); - assertEquals(ZonedDateTime.of(1976, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec( - "ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return zdt.withYear(1976);" - )); + assertEquals( + ZonedDateTime.of(1976, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), + exec("ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + "return zdt.withYear(1976);") + ); } public void testLongTimeDifference() { - assertEquals(3000L, exec( - "long startTimestamp = 434931327000L;" + - "long endTimestamp = 434931330000L;" + - "return endTimestamp - startTimestamp;" - )); + assertEquals( + 3000L, + exec("long startTimestamp = 434931327000L;" + "long endTimestamp = 434931330000L;" + "return endTimestamp - startTimestamp;") + ); } public void testZonedDateTimeDifference() { - assertEquals(4989L, exec( - "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + - "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z'));" + - "return ChronoUnit.MILLIS.between(zdt1, zdt2);" - )); + assertEquals( + 4989L, + exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return ChronoUnit.MILLIS.between(zdt1, zdt2);" + ) + ); - assertEquals(4L, exec( - "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + - "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + - "return ChronoUnit.DAYS.between(zdt1, zdt2);" - )); + assertEquals( + 4L, + exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return ChronoUnit.DAYS.between(zdt1, zdt2);" + ) + ); } public void compareLongs() { - assertEquals(false, exec( - "long ts1 = 434931327000L;" + - "long ts2 = 434931330000L;" + - "return ts1 > ts2;" - )); + assertEquals(false, exec("long ts1 = 434931327000L;" + "long ts2 = 434931330000L;" + "return ts1 > ts2;")); } public void compareZonedDateTimes() { - assertEquals(true, exec( - "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + - "return zdt1.isBefore(zdt2);" - )); + assertEquals( + true, + exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return zdt1.isBefore(zdt2);" + ) + ); - assertEquals(false, exec( - "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + - "return zdt1.isAfter(zdt2);" - )); + assertEquals( + false, + exec( + "ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "ZonedDateTime zdt2 = ZonedDateTime.of(1983, 
10, 17, 22, 15, 35, 0, ZoneId.of('Z'));" + + "return zdt1.isAfter(zdt2);" + ) + ); } public void testTimeZone() { - assertEquals(ZonedDateTime.of(1983, 10, 13, 15, 15, 30, 0, ZoneId.of("America/Los_Angeles")), exec( - "ZonedDateTime utc = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + - "return utc.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));")); + assertEquals( + ZonedDateTime.of(1983, 10, 13, 15, 15, 30, 0, ZoneId.of("America/Los_Angeles")), + exec( + "ZonedDateTime utc = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));" + + "return utc.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));" + ) + ); - assertEquals("Thu, 13 Oct 1983 15:15:30 -0700", exec( - "String gmtString = 'Thu, 13 Oct 1983 22:15:30 GMT';" + - "ZonedDateTime gmtZdt = ZonedDateTime.parse(gmtString," + - "DateTimeFormatter.RFC_1123_DATE_TIME);" + - "ZonedDateTime pstZdt =" + - "gmtZdt.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));" + - "return pstZdt.format(DateTimeFormatter.RFC_1123_DATE_TIME);")); + assertEquals( + "Thu, 13 Oct 1983 15:15:30 -0700", + exec( + "String gmtString = 'Thu, 13 Oct 1983 22:15:30 GMT';" + + "ZonedDateTime gmtZdt = ZonedDateTime.parse(gmtString," + + "DateTimeFormatter.RFC_1123_DATE_TIME);" + + "ZonedDateTime pstZdt =" + + "gmtZdt.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));" + + "return pstZdt.format(DateTimeFormatter.RFC_1123_DATE_TIME);" + ) + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DebugTests.java index ebd10f73d2e..285de018f1d 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DebugTests.java @@ -55,8 +55,10 @@ public class DebugTests extends ScriptTestCase { public void testExplain() { // Debug.explain can explain an object Object dummy = new Object(); - PainlessExplainError e = expectScriptThrows(PainlessExplainError.class, () -> exec( - "Debug.explain(params.a)", singletonMap("a", dummy), true)); + PainlessExplainError e = expectScriptThrows( + PainlessExplainError.class, + () -> exec("Debug.explain(params.a)", singletonMap("a", dummy), true) + ); assertSame(dummy, e.getObjectToExplain()); assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.to_string", singletonList(dummy.toString()))); assertThat(e.getHeaders(painlessLookup), hasEntry("opensearch.java_class", singletonList("java.lang.Object"))); @@ -70,12 +72,14 @@ public class DebugTests extends ScriptTestCase { assertThat(e.getHeaders(painlessLookup), not(hasKey("opensearch.painless_class"))); // You can't catch the explain exception - e = expectScriptThrows(PainlessExplainError.class, () -> exec( - "try {\n" - + " Debug.explain(params.a)\n" - + "} catch (Exception e) {\n" - + " return 1\n" - + "}", singletonMap("a", dummy), true)); + e = expectScriptThrows( + PainlessExplainError.class, + () -> exec( + "try {\n" + " Debug.explain(params.a)\n" + "} catch (Exception e) {\n" + " return 1\n" + "}", + singletonMap("a", dummy), + true + ) + ); assertSame(dummy, e.getObjectToExplain()); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/opensearch/painless/Debugger.java index d69eb828953..31233bd7518 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/Debugger.java @@ 
-57,8 +57,7 @@ final class Debugger { PrintWriter outputWriter = new PrintWriter(output); Textifier textifier = new Textifier(); try { - new Compiler(iface, null, null, LOOKUP) - .compile("", source, settings, textifier); + new Compiler(iface, null, null, LOOKUP).compile("", source, settings, textifier); } catch (RuntimeException e) { textifier.print(outputWriter); e.addSuppressed(new Exception("current bytecode: \n" + output)); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DefBootstrapTests.java index 5d8a72f6fa0..9134d7a3c06 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DefBootstrapTests.java @@ -51,174 +51,197 @@ public class DefBootstrapTests extends OpenSearchTestCase { /** calls toString() on integers, twice */ public void testOneType() throws Throwable { - CallSite site = DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, - ""); + CallSite site = DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + "" + ); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); // invoke with integer, needs lookup - assertEquals("5", (String)handle.invokeExact((Object)5)); + assertEquals("5", (String) handle.invokeExact((Object) 5)); assertDepthEquals(site, 1); // invoked with integer again: should be cached - assertEquals("6", (String)handle.invokeExact((Object)6)); + assertEquals("6", (String) handle.invokeExact((Object) 6)); assertDepthEquals(site, 1); } public void testTwoTypes() throws Throwable { - CallSite site = DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, - ""); + CallSite site = DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + "" + ); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); - assertEquals("5", (String)handle.invokeExact((Object)5)); + assertEquals("5", (String) handle.invokeExact((Object) 5)); assertDepthEquals(site, 1); - assertEquals("1.5", (String)handle.invokeExact((Object)1.5f)); + assertEquals("1.5", (String) handle.invokeExact((Object) 1.5f)); assertDepthEquals(site, 2); // both these should be cached - assertEquals("6", (String)handle.invokeExact((Object)6)); + assertEquals("6", (String) handle.invokeExact((Object) 6)); assertDepthEquals(site, 2); - assertEquals("2.5", (String)handle.invokeExact((Object)2.5f)); + assertEquals("2.5", (String) handle.invokeExact((Object) 2.5f)); assertDepthEquals(site, 2); } public void testTooManyTypes() throws Throwable { // if this changes, test must be rewritten assertEquals(5, DefBootstrap.PIC.MAX_DEPTH); - CallSite site = DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, 
Object.class), - 0, - DefBootstrap.METHOD_CALL, - ""); + CallSite site = DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + "" + ); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); - assertEquals("5", (String)handle.invokeExact((Object)5)); + assertEquals("5", (String) handle.invokeExact((Object) 5)); assertDepthEquals(site, 1); - assertEquals("1.5", (String)handle.invokeExact((Object)1.5f)); + assertEquals("1.5", (String) handle.invokeExact((Object) 1.5f)); assertDepthEquals(site, 2); - assertEquals("6", (String)handle.invokeExact((Object)6L)); + assertEquals("6", (String) handle.invokeExact((Object) 6L)); assertDepthEquals(site, 3); - assertEquals("3.2", (String)handle.invokeExact((Object)3.2d)); + assertEquals("3.2", (String) handle.invokeExact((Object) 3.2d)); assertDepthEquals(site, 4); - assertEquals("foo", (String)handle.invokeExact((Object)"foo")); + assertEquals("foo", (String) handle.invokeExact((Object) "foo")); assertDepthEquals(site, 5); - assertEquals("c", (String)handle.invokeExact((Object)'c')); + assertEquals("c", (String) handle.invokeExact((Object) 'c')); assertDepthEquals(site, 5); } /** test that we revert to the megamorphic classvalue cache and that it works as expected */ public void testMegamorphic() throws Throwable { - DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "size", - MethodType.methodType(int.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, - ""); + DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "size", + MethodType.methodType(int.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + "" + ); site.depth = DefBootstrap.PIC.MAX_DEPTH; // mark megamorphic MethodHandle handle = site.dynamicInvoker(); - assertEquals(2, (int)handle.invokeExact((Object) Arrays.asList("1", "2"))); - assertEquals(1, (int)handle.invokeExact((Object) Collections.singletonMap("a", "b"))); - assertEquals(3, (int)handle.invokeExact((Object) Arrays.asList("x", "y", "z"))); - assertEquals(2, (int)handle.invokeExact((Object) Arrays.asList("u", "v"))); + assertEquals(2, (int) handle.invokeExact((Object) Arrays.asList("1", "2"))); + assertEquals(1, (int) handle.invokeExact((Object) Collections.singletonMap("a", "b"))); + assertEquals(3, (int) handle.invokeExact((Object) Arrays.asList("x", "y", "z"))); + assertEquals(2, (int) handle.invokeExact((Object) Arrays.asList("u", "v"))); - final HashMap map = new HashMap<>(); + final HashMap map = new HashMap<>(); map.put("x", "y"); map.put("a", "b"); - assertEquals(2, (int)handle.invokeExact((Object) map)); + assertEquals(2, (int) handle.invokeExact((Object) map)); - final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> { - Integer.toString((int)handle.invokeExact(new Object())); - }); + final IllegalArgumentException iae = expectThrows( + IllegalArgumentException.class, + () -> { Integer.toString((int) handle.invokeExact(new Object())); } + ); assertEquals("dynamic method [java.lang.Object, size/0] not found", iae.getMessage()); - assertTrue("Does not fail inside ClassValue.computeValue()", Arrays.stream(iae.getStackTrace()).anyMatch(e -> { - return 
e.getMethodName().equals("computeValue") && - e.getClassName().startsWith("org.opensearch.painless.DefBootstrap$PIC$"); - })); + assertTrue( + "Does not fail inside ClassValue.computeValue()", + Arrays.stream(iae.getStackTrace()) + .anyMatch( + e -> { + return e.getMethodName().equals("computeValue") + && e.getClassName().startsWith("org.opensearch.painless.DefBootstrap$PIC$"); + } + ) + ); } // test operators with null guards public void testNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL + ); MethodHandle handle = site.dynamicInvoker(); - assertEquals("nulltest", (Object)handle.invokeExact((Object)null, (Object)"test")); + assertEquals("nulltest", (Object) handle.invokeExact((Object) null, (Object) "test")); } public void testNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL + ); MethodHandle handle = site.dynamicInvoker(); - assertEquals(2, (Object)handle.invokeExact((Object)1, (Object)1)); - assertEquals("nulltest", (Object)handle.invokeExact((Object)null, (Object)"test")); + assertEquals(2, (Object) handle.invokeExact((Object) 1, (Object) 1)); + assertEquals("nulltest", (Object) handle.invokeExact((Object) null, (Object) "test")); } public void testNullGuardEq() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "eq", - MethodType.methodType(boolean.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "eq", + MethodType.methodType(boolean.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL + ); MethodHandle handle = site.dynamicInvoker(); - assertFalse((boolean) handle.invokeExact((Object)null, (Object)"test")); - assertTrue((boolean) handle.invokeExact((Object)null, (Object)null)); + assertFalse((boolean) handle.invokeExact((Object) null, (Object) "test")); + assertTrue((boolean) handle.invokeExact((Object) null, (Object) null)); } public void testNullGuardEqWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) 
DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "eq", - MethodType.methodType(boolean.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "eq", + MethodType.methodType(boolean.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL + ); MethodHandle handle = site.dynamicInvoker(); - assertTrue((boolean) handle.invokeExact((Object)1, (Object)1)); - assertFalse((boolean) handle.invokeExact((Object)null, (Object)"test")); - assertTrue((boolean) handle.invokeExact((Object)null, (Object)null)); + assertTrue((boolean) handle.invokeExact((Object) 1, (Object) 1)); + assertFalse((boolean) handle.invokeExact((Object) null, (Object) "test")); + assertTrue((boolean) handle.invokeExact((Object) null, (Object) null)); } // make sure these operators work without null guards too @@ -226,36 +249,36 @@ public class DefBootstrapTests extends OpenSearchTestCase { // and can be disabled in some circumstances. public void testNoNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, int.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - 0); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, int.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + 0 + ); MethodHandle handle = site.dynamicInvoker(); - expectThrows(NullPointerException.class, () -> { - assertNotNull((Object)handle.invokeExact(5, (Object)null)); - }); + expectThrows(NullPointerException.class, () -> { assertNotNull((Object) handle.invokeExact(5, (Object) null)); }); } public void testNoNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, - new FunctionTable(), - Collections.emptyMap(), - MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, int.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, - 0); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap( + painlessLookup, + new FunctionTable(), + Collections.emptyMap(), + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, int.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + 0 + ); MethodHandle handle = site.dynamicInvoker(); - assertEquals(2, (Object)handle.invokeExact(1, (Object)1)); - expectThrows(NullPointerException.class, () -> { - assertNotNull((Object)handle.invokeExact(5, (Object)null)); - }); + assertEquals(2, (Object) handle.invokeExact(1, (Object) 1)); + expectThrows(NullPointerException.class, () -> { assertNotNull((Object) handle.invokeExact(5, (Object) null)); }); } static void assertDepthEquals(CallSite site, int expected) { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DefCastTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DefCastTests.java index 6f7074ed577..df8ffd2be44 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DefCastTests.java 
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DefCastTests.java @@ -58,7 +58,7 @@ public class DefCastTests extends ScriptTestCase { public void testdefTobyteImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; byte b = d;")); - assertEquals((byte)0, exec("def d = (byte)0; byte b = d; b")); + assertEquals((byte) 0, exec("def d = (byte)0; byte b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (short)0; byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (char)0; byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; byte b = d;")); @@ -66,7 +66,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); byte b = d;")); - assertEquals((byte)0, exec("def d = Byte.valueOf(0); byte b = d; b")); + assertEquals((byte) 0, exec("def d = Byte.valueOf(0); byte b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Short.valueOf(0); byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Character.valueOf(0); byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); byte b = d;")); @@ -79,16 +79,16 @@ public class DefCastTests extends ScriptTestCase { public void testdefToshortImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; short b = d;")); - assertEquals((short)0, exec("def d = (byte)0; short b = d; b")); - assertEquals((short)0, exec("def d = (short)0; short b = d; b")); + assertEquals((short) 0, exec("def d = (byte)0; short b = d; b")); + assertEquals((short) 0, exec("def d = (short)0; short b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (char)0; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (long)0; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); short b = d;")); - assertEquals((short)0, exec("def d = Byte.valueOf(0); short b = d; b")); - assertEquals((short)0, exec("def d = Short.valueOf(0); short b = d; b")); + assertEquals((short) 0, exec("def d = Byte.valueOf(0); short b = d; b")); + assertEquals((short) 0, exec("def d = Short.valueOf(0); short b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Character.valueOf(0); short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Long.valueOf(0); short b = d;")); @@ -103,7 +103,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = true; char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (byte)0; char b = 
d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (short)0; char b = d;")); - assertEquals((char)0, exec("def d = (char)0; char b = d; b")); + assertEquals((char) 0, exec("def d = (char)0; char b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (long)0; char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; char b = d;")); @@ -111,7 +111,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Byte.valueOf(0); char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Short.valueOf(0); char b = d;")); - assertEquals((char)0, exec("def d = Character.valueOf(0); char b = d; b")); + assertEquals((char) 0, exec("def d = Character.valueOf(0); char b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Long.valueOf(0); char b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Float.valueOf(0); char b = d;")); @@ -143,19 +143,19 @@ public class DefCastTests extends ScriptTestCase { public void testdefTolongImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; long b = d;")); - assertEquals((long)0, exec("def d = (byte)0; long b = d; b")); - assertEquals((long)0, exec("def d = (short)0; long b = d; b")); - assertEquals((long)0, exec("def d = (char)0; long b = d; b")); - assertEquals((long)0, exec("def d = 0; long b = d; b")); - assertEquals((long)0, exec("def d = (long)0; long b = d; b")); + assertEquals((long) 0, exec("def d = (byte)0; long b = d; b")); + assertEquals((long) 0, exec("def d = (short)0; long b = d; b")); + assertEquals((long) 0, exec("def d = (char)0; long b = d; b")); + assertEquals((long) 0, exec("def d = 0; long b = d; b")); + assertEquals((long) 0, exec("def d = (long)0; long b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); long b = d;")); - assertEquals((long)0, exec("def d = Byte.valueOf(0); long b = d; b")); - assertEquals((long)0, exec("def d = Short.valueOf(0); long b = d; b")); - assertEquals((long)0, exec("def d = Character.valueOf(0); long b = d; b")); - assertEquals((long)0, exec("def d = Integer.valueOf(0); long b = d; b")); - assertEquals((long)0, exec("def d = Long.valueOf(0); long b = d; b")); + assertEquals((long) 0, exec("def d = Byte.valueOf(0); long b = d; b")); + assertEquals((long) 0, exec("def d = Short.valueOf(0); long b = d; b")); + assertEquals((long) 0, exec("def d = Character.valueOf(0); long b = d; b")); + assertEquals((long) 0, exec("def d = Integer.valueOf(0); long b = d; b")); + assertEquals((long) 0, exec("def d = Long.valueOf(0); long b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Float.valueOf(0); long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Double.valueOf(0); long b = d;")); expectScriptThrows(ClassCastException.class, () -> 
exec("def d = new ArrayList(); long b = d;")); @@ -164,21 +164,21 @@ public class DefCastTests extends ScriptTestCase { public void testdefTodoubleImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; double b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; double b = d;")); - assertEquals((double)0, exec("def d = (byte)0; double b = d; b")); - assertEquals((double)0, exec("def d = (short)0; double b = d; b")); - assertEquals((double)0, exec("def d = (char)0; double b = d; b")); - assertEquals((double)0, exec("def d = 0; double b = d; b")); - assertEquals((double)0, exec("def d = (long)0; double b = d; b")); - assertEquals((double)0, exec("def d = (float)0; double b = d; b")); - assertEquals((double)0, exec("def d = (double)0; double b = d; b")); + assertEquals((double) 0, exec("def d = (byte)0; double b = d; b")); + assertEquals((double) 0, exec("def d = (short)0; double b = d; b")); + assertEquals((double) 0, exec("def d = (char)0; double b = d; b")); + assertEquals((double) 0, exec("def d = 0; double b = d; b")); + assertEquals((double) 0, exec("def d = (long)0; double b = d; b")); + assertEquals((double) 0, exec("def d = (float)0; double b = d; b")); + assertEquals((double) 0, exec("def d = (double)0; double b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); double b = d;")); - assertEquals((double)0, exec("def d = Byte.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Short.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Character.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Integer.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Long.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Float.valueOf(0); double b = d; b")); - assertEquals((double)0, exec("def d = Double.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Byte.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Short.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Character.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Integer.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Long.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Float.valueOf(0); double b = d; b")); + assertEquals((double) 0, exec("def d = Double.valueOf(0); double b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); double b = d;")); } @@ -206,42 +206,42 @@ public class DefCastTests extends ScriptTestCase { public void testdefTobyteExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; byte b = (byte)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; byte b = (byte)d;")); - assertEquals((byte)0, exec("def d = (byte)0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = (short)0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = (char)0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = 0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = (long)0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = (float)0; byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = (double)0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = (byte)0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = (short)0; byte b = 
(byte)d; b")); + assertEquals((byte) 0, exec("def d = (char)0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = 0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = (long)0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = (float)0; byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = (double)0; byte b = (byte)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); byte b = d;")); - assertEquals((byte)0, exec("def d = Byte.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Short.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Character.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Integer.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Long.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Float.valueOf(0); byte b = (byte)d; b")); - assertEquals((byte)0, exec("def d = Double.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Byte.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Short.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Character.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Integer.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Long.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Float.valueOf(0); byte b = (byte)d; b")); + assertEquals((byte) 0, exec("def d = Double.valueOf(0); byte b = (byte)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); byte b = (byte)d;")); } public void testdefToshortExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; short b = (short)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; short b = (short)d;")); - assertEquals((short)0, exec("def d = (byte)0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = (short)0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = (char)0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = 0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = (long)0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = (float)0; short b = (short)d; b")); - assertEquals((short)0, exec("def d = (double)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (byte)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (short)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (char)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = 0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (long)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (float)0; short b = (short)d; b")); + assertEquals((short) 0, exec("def d = (double)0; short b = (short)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); short b = d;")); - assertEquals((short)0, exec("def d = Byte.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = Short.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = Character.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = Integer.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = Long.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = 
Float.valueOf(0); short b = (short)d; b")); - assertEquals((short)0, exec("def d = Double.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Byte.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Short.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Character.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Integer.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Long.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Float.valueOf(0); short b = (short)d; b")); + assertEquals((short) 0, exec("def d = Double.valueOf(0); short b = (short)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); short b = (short)d;")); } @@ -249,21 +249,21 @@ public class DefCastTests extends ScriptTestCase { assertEquals('s', exec("def d = 's'; char b = (char)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; char b = (char)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; char b = (char)d;")); - assertEquals((char)0, exec("def d = (byte)0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = (short)0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = (char)0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = 0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = (long)0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = (float)0; char b = (char)d; b")); - assertEquals((char)0, exec("def d = (double)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (byte)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (short)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (char)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = 0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (long)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (float)0; char b = (char)d; b")); + assertEquals((char) 0, exec("def d = (double)0; char b = (char)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); char b = d;")); - assertEquals((char)0, exec("def d = Byte.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Short.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Character.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Integer.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Long.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Float.valueOf(0); char b = (char)d; b")); - assertEquals((char)0, exec("def d = Double.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Byte.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Short.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Character.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Integer.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Long.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Float.valueOf(0); char b = (char)d; b")); + assertEquals((char) 0, exec("def d = Double.valueOf(0); char b = (char)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); char b = (char)d;")); } @@ -291,63 +291,63 @@ public class DefCastTests extends 
ScriptTestCase { public void testdefTolongExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; long b = (long)d;")); - assertEquals((long)0, exec("def d = (byte)0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = (short)0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = (char)0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = 0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = (long)0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = (float)0; long b = (long)d; b")); - assertEquals((long)0, exec("def d = (double)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (byte)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (short)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (char)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = 0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (long)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (float)0; long b = (long)d; b")); + assertEquals((long) 0, exec("def d = (double)0; long b = (long)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); long b = d;")); - assertEquals((long)0, exec("def d = Byte.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Short.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Character.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Integer.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Long.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Float.valueOf(0); long b = (long)d; b")); - assertEquals((long)0, exec("def d = Double.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Byte.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Short.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Character.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Integer.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Long.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Float.valueOf(0); long b = (long)d; b")); + assertEquals((long) 0, exec("def d = Double.valueOf(0); long b = (long)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); long b = (long)d;")); } public void testdefTofloatExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; float b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; float b = (float)d;")); - assertEquals((float)0, exec("def d = (byte)0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = (short)0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = (char)0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = 0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = (long)0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = (float)0; float b = (float)d; b")); - assertEquals((float)0, exec("def d = (double)0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (byte)0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (short)0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (char)0; float b = (float)d; b")); + 
assertEquals((float) 0, exec("def d = 0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (long)0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (float)0; float b = (float)d; b")); + assertEquals((float) 0, exec("def d = (double)0; float b = (float)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); float b = d;")); - assertEquals((float)0, exec("def d = Byte.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Short.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Character.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Integer.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Long.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Float.valueOf(0); float b = (float)d; b")); - assertEquals((float)0, exec("def d = Double.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Byte.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Short.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Character.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Integer.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Long.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Float.valueOf(0); float b = (float)d; b")); + assertEquals((float) 0, exec("def d = Double.valueOf(0); float b = (float)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); float b = (float)d;")); } public void testdefTodoubleExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; double b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; double b = (double)d;")); - assertEquals((double)0, exec("def d = (byte)0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = (short)0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = (char)0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = 0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = (long)0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = (float)0; double b = (double)d; b")); - assertEquals((double)0, exec("def d = (double)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (byte)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (short)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (char)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = 0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (long)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (float)0; double b = (double)d; b")); + assertEquals((double) 0, exec("def d = (double)0; double b = (double)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); double b = d;")); - assertEquals((double)0, exec("def d = Byte.valueOf(0); double b = (double)d; b")); - assertEquals((double)0, exec("def d = Short.valueOf(0); double b = (double)d; b")); - assertEquals((double)0, exec("def d = Character.valueOf(0); double b = (double)d; b")); - assertEquals((double)0, exec("def d = Integer.valueOf(0); double b = (double)d; b")); - assertEquals((double)0, exec("def d = Long.valueOf(0); double b = (double)d; b")); - 
assertEquals((double)0, exec("def d = Float.valueOf(0); double b = (double)d; b")); - assertEquals((double)0, exec("def d = Double.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Byte.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Short.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Character.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Integer.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Long.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Float.valueOf(0); double b = (double)d; b")); + assertEquals((double) 0, exec("def d = Double.valueOf(0); double b = (double)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); double b = (double)d;")); } @@ -375,7 +375,7 @@ public class DefCastTests extends ScriptTestCase { public void testdefToByteImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Byte b = d;")); - assertEquals((byte)0, exec("def d = (byte)0; Byte b = d; b")); + assertEquals((byte) 0, exec("def d = (byte)0; Byte b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (short)0; Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (char)0; Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; Byte b = d;")); @@ -383,7 +383,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Byte b = d;")); - assertEquals((byte)0, exec("def d = Byte.valueOf(0); Byte b = d; b")); + assertEquals((byte) 0, exec("def d = Byte.valueOf(0); Byte b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Short.valueOf(0); Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Character.valueOf(0); Byte b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); Byte b = d;")); @@ -396,16 +396,16 @@ public class DefCastTests extends ScriptTestCase { public void testdefToShortImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Short b = d;")); - assertEquals((short)0, exec("def d = (byte)0; Short b = d; b")); - assertEquals((short)0, exec("def d = (short)0; Short b = d; b")); + assertEquals((short) 0, exec("def d = (byte)0; Short b = d; b")); + assertEquals((short) 0, exec("def d = (short)0; Short b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (char)0; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (long)0; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Short b = d;")); - assertEquals((short)0, exec("def d 
= Byte.valueOf(0); Short b = d; b")); - assertEquals((short)0, exec("def d = Short.valueOf(0); Short b = d; b")); + assertEquals((short) 0, exec("def d = Byte.valueOf(0); Short b = d; b")); + assertEquals((short) 0, exec("def d = Short.valueOf(0); Short b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Character.valueOf(0); Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); Short b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Long.valueOf(0); Short b = d;")); @@ -420,7 +420,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (byte)0; Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (short)0; Character b = d;")); - assertEquals((char)0, exec("def d = (char)0; Character b = d; b")); + assertEquals((char) 0, exec("def d = (char)0; Character b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (int)0; Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (long)0; Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; Character b = d;")); @@ -428,7 +428,7 @@ public class DefCastTests extends ScriptTestCase { expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Byte.valueOf(0); Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Short.valueOf(0); Character b = d;")); - assertEquals((char)0, exec("def d = Character.valueOf(0); Character b = d; b")); + assertEquals((char) 0, exec("def d = Character.valueOf(0); Character b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Integer.valueOf(0); Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Long.valueOf(0); Character b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Float.valueOf(0); Character b = d;")); @@ -460,19 +460,19 @@ public class DefCastTests extends ScriptTestCase { public void testdefToLongImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Long b = d;")); - assertEquals((long)0, exec("def d = (byte)0; Long b = d; b")); - assertEquals((long)0, exec("def d = (short)0; Long b = d; b")); - assertEquals((long)0, exec("def d = (char)0; Long b = d; b")); - assertEquals((long)0, exec("def d = 0; Long b = d; b")); - assertEquals((long)0, exec("def d = (long)0; Long b = d; b")); + assertEquals((long) 0, exec("def d = (byte)0; Long b = d; b")); + assertEquals((long) 0, exec("def d = (short)0; Long b = d; b")); + assertEquals((long) 0, exec("def d = (char)0; Long b = d; b")); + assertEquals((long) 0, exec("def d = 0; Long b = d; b")); + assertEquals((long) 0, exec("def d = (long)0; Long b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (float)0; Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Long b = d;")); - assertEquals((long)0, exec("def d = Byte.valueOf(0); Long b = d; b")); - assertEquals((long)0, exec("def d = 
Short.valueOf(0); Long b = d; b")); - assertEquals((long)0, exec("def d = Character.valueOf(0); Long b = d; b")); - assertEquals((long)0, exec("def d = Integer.valueOf(0); Long b = d; b")); - assertEquals((long)0, exec("def d = Long.valueOf(0); Long b = d; b")); + assertEquals((long) 0, exec("def d = Byte.valueOf(0); Long b = d; b")); + assertEquals((long) 0, exec("def d = Short.valueOf(0); Long b = d; b")); + assertEquals((long) 0, exec("def d = Character.valueOf(0); Long b = d; b")); + assertEquals((long) 0, exec("def d = Integer.valueOf(0); Long b = d; b")); + assertEquals((long) 0, exec("def d = Long.valueOf(0); Long b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Float.valueOf(0); Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Double.valueOf(0); Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Long b = d;")); @@ -481,20 +481,20 @@ public class DefCastTests extends ScriptTestCase { public void testdefToFloatImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Float b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Float b = d;")); - assertEquals((float)0, exec("def d = (byte)0; Float b = d; b")); - assertEquals((float)0, exec("def d = (short)0; Float b = d; b")); - assertEquals((float)0, exec("def d = (char)0; Float b = d; b")); - assertEquals((float)0, exec("def d = 0; Float b = d; b")); - assertEquals((float)0, exec("def d = (long)0; Float b = d; b")); - assertEquals((float)0, exec("def d = (float)0; Float b = d; b")); + assertEquals((float) 0, exec("def d = (byte)0; Float b = d; b")); + assertEquals((float) 0, exec("def d = (short)0; Float b = d; b")); + assertEquals((float) 0, exec("def d = (char)0; Float b = d; b")); + assertEquals((float) 0, exec("def d = 0; Float b = d; b")); + assertEquals((float) 0, exec("def d = (long)0; Float b = d; b")); + assertEquals((float) 0, exec("def d = (float)0; Float b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = (double)0; Float b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Float b = d;")); - assertEquals((float)0, exec("def d = Byte.valueOf(0); Float b = d; b")); - assertEquals((float)0, exec("def d = Short.valueOf(0); Float b = d; b")); - assertEquals((float)0, exec("def d = Character.valueOf(0); Float b = d; b")); - assertEquals((float)0, exec("def d = Integer.valueOf(0); Float b = d; b")); - assertEquals((float)0, exec("def d = Long.valueOf(0); Float b = d; b")); - assertEquals((float)0, exec("def d = Float.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Byte.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Short.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Character.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Integer.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Long.valueOf(0); Float b = d; b")); + assertEquals((float) 0, exec("def d = Float.valueOf(0); Float b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Double.valueOf(0); Float b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Float b = d;")); } @@ -502,21 +502,21 @@ public class DefCastTests extends ScriptTestCase { public void testdefToDoubleImplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Double 
b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Double b = d;")); - assertEquals((double)0, exec("def d = (byte)0; Double b = d; b")); - assertEquals((double)0, exec("def d = (short)0; Double b = d; b")); - assertEquals((double)0, exec("def d = (char)0; Double b = d; b")); - assertEquals((double)0, exec("def d = 0; Double b = d; b")); - assertEquals((double)0, exec("def d = (long)0; Double b = d; b")); - assertEquals((double)0, exec("def d = (float)0; Double b = d; b")); - assertEquals((double)0, exec("def d = (double)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (byte)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (short)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (char)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = 0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (long)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (float)0; Double b = d; b")); + assertEquals((double) 0, exec("def d = (double)0; Double b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Double b = d;")); - assertEquals((double)0, exec("def d = Byte.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Short.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Character.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Integer.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Long.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Float.valueOf(0); Double b = d; b")); - assertEquals((double)0, exec("def d = Double.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Byte.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Short.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Character.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Integer.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Long.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Float.valueOf(0); Double b = d; b")); + assertEquals((double) 0, exec("def d = Double.valueOf(0); Double b = d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Double b = d;")); } @@ -544,42 +544,42 @@ public class DefCastTests extends ScriptTestCase { public void testdefToByteExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Byte b = (Byte)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Byte b = (Byte)d;")); - assertEquals((byte)0, exec("def d = (byte)0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = (short)0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = (char)0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = 0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = (long)0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = (float)0; Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = (double)0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = (byte)0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = (short)0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = (char)0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = 0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = (long)0; Byte b = (Byte)d; b")); + 
assertEquals((byte) 0, exec("def d = (float)0; Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = (double)0; Byte b = (Byte)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Byte b = d;")); - assertEquals((byte)0, exec("def d = Byte.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Short.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Character.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Integer.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Long.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Float.valueOf(0); Byte b = (Byte)d; b")); - assertEquals((byte)0, exec("def d = Double.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Byte.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Short.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Character.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Integer.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Long.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Float.valueOf(0); Byte b = (Byte)d; b")); + assertEquals((byte) 0, exec("def d = Double.valueOf(0); Byte b = (Byte)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Byte b = (Byte)d;")); } public void testdefToShortExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Short b = (Short)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Short b = (Short)d;")); - assertEquals((short)0, exec("def d = (byte)0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = (short)0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = (char)0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = 0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = (long)0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = (float)0; Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = (double)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (byte)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (short)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (char)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = 0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (long)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (float)0; Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = (double)0; Short b = (Short)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Short b = d;")); - assertEquals((short)0, exec("def d = Byte.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Short.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Character.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Integer.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Long.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Float.valueOf(0); Short b = (Short)d; b")); - assertEquals((short)0, exec("def d = Double.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Byte.valueOf(0); Short b = (Short)d; b")); + 
assertEquals((short) 0, exec("def d = Short.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Character.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Integer.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Long.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Float.valueOf(0); Short b = (Short)d; b")); + assertEquals((short) 0, exec("def d = Double.valueOf(0); Short b = (Short)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Short b = (Short)d;")); } @@ -587,21 +587,21 @@ public class DefCastTests extends ScriptTestCase { assertEquals('s', exec("def d = 's'; Character b = (Character)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Character b = (Character)d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Character b = (Character)d;")); - assertEquals((char)0, exec("def d = (byte)0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = (short)0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = (char)0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = 0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = (long)0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = (float)0; Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = (double)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (byte)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (short)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (char)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = 0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (long)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (float)0; Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = (double)0; Character b = (Character)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Character b = d;")); - assertEquals((char)0, exec("def d = Byte.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Short.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Character.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Integer.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Long.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Float.valueOf(0); Character b = (Character)d; b")); - assertEquals((char)0, exec("def d = Double.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Byte.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Short.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Character.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Integer.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Long.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Float.valueOf(0); Character b = (Character)d; b")); + assertEquals((char) 0, exec("def d = Double.valueOf(0); Character b = (Character)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def 
d = new ArrayList(); Character b = (Character)d;")); } @@ -629,63 +629,63 @@ public class DefCastTests extends ScriptTestCase { public void testdefToLongExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Long b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Long b = (Long)d;")); - assertEquals((long)0, exec("def d = (byte)0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = (short)0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = (char)0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = 0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = (long)0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = (float)0; Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = (double)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (byte)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (short)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (char)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = 0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (long)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (float)0; Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = (double)0; Long b = (Long)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Long b = d;")); - assertEquals((long)0, exec("def d = Byte.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Short.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Character.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Integer.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Long.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Float.valueOf(0); Long b = (Long)d; b")); - assertEquals((long)0, exec("def d = Double.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Byte.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Short.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Character.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Integer.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Long.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Float.valueOf(0); Long b = (Long)d; b")); + assertEquals((long) 0, exec("def d = Double.valueOf(0); Long b = (Long)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Long b = (Long)d;")); } public void testdefToFloatExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Float b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Float b = (Float)d;")); - assertEquals((float)0, exec("def d = (byte)0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = (short)0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = (char)0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = 0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = (long)0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = (float)0; Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = (double)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = (byte)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = 
(short)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = (char)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = 0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = (long)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = (float)0; Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = (double)0; Float b = (Float)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Float b = d;")); - assertEquals((float)0, exec("def d = Byte.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Short.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Character.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Integer.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Long.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Float.valueOf(0); Float b = (Float)d; b")); - assertEquals((float)0, exec("def d = Double.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Byte.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Short.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Character.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Integer.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Long.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Float.valueOf(0); Float b = (Float)d; b")); + assertEquals((float) 0, exec("def d = Double.valueOf(0); Float b = (Float)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Float b = (Float)d;")); } public void testdefToDoubleExplicit() { expectScriptThrows(ClassCastException.class, () -> exec("def d = 'string'; Double b = d;")); expectScriptThrows(ClassCastException.class, () -> exec("def d = true; Double b = (Double)d;")); - assertEquals((double)0, exec("def d = (byte)0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = (short)0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = (char)0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = 0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = (long)0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = (float)0; Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = (double)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (byte)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (short)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (char)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = 0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (long)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (float)0; Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = (double)0; Double b = (Double)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = Boolean.valueOf(true); Double b = d;")); - assertEquals((double)0, exec("def d = Byte.valueOf(0); Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = Short.valueOf(0); Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = Character.valueOf(0); Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = Integer.valueOf(0); Double b = 
(Double)d; b")); - assertEquals((double)0, exec("def d = Long.valueOf(0); Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = Float.valueOf(0); Double b = (Double)d; b")); - assertEquals((double)0, exec("def d = Double.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Byte.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Short.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Character.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Integer.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Long.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Float.valueOf(0); Double b = (Double)d; b")); + assertEquals((double) 0, exec("def d = Double.valueOf(0); Double b = (Double)d; b")); expectScriptThrows(ClassCastException.class, () -> exec("def d = new ArrayList(); Double b = (Double)d;")); } @@ -698,28 +698,31 @@ public class DefCastTests extends ScriptTestCase { } public void testConstFoldingDefCast() { - assertFalse((boolean)exec("def chr = 10; return (chr == (char)'x');")); - assertFalse((boolean)exec("def chr = 10; return (chr >= (char)'x');")); - assertTrue((boolean)exec("def chr = (char)10; return (chr <= (char)'x');")); - assertTrue((boolean)exec("def chr = 10; return (chr < (char)'x');")); - assertFalse((boolean)exec("def chr = (char)10; return (chr > (char)'x');")); - assertFalse((boolean)exec("def chr = 10L; return (chr > (char)'x');")); - assertFalse((boolean)exec("def chr = 10F; return (chr > (char)'x');")); - assertFalse((boolean)exec("def chr = 10D; return (chr > (char)'x');")); - assertFalse((boolean)exec("def chr = (char)10L; return (chr > (byte)10);")); - assertFalse((boolean)exec("def chr = (char)10L; return (chr > (double)(byte)(char)10);")); + assertFalse((boolean) exec("def chr = 10; return (chr == (char)'x');")); + assertFalse((boolean) exec("def chr = 10; return (chr >= (char)'x');")); + assertTrue((boolean) exec("def chr = (char)10; return (chr <= (char)'x');")); + assertTrue((boolean) exec("def chr = 10; return (chr < (char)'x');")); + assertFalse((boolean) exec("def chr = (char)10; return (chr > (char)'x');")); + assertFalse((boolean) exec("def chr = 10L; return (chr > (char)'x');")); + assertFalse((boolean) exec("def chr = 10F; return (chr > (char)'x');")); + assertFalse((boolean) exec("def chr = 10D; return (chr > (char)'x');")); + assertFalse((boolean) exec("def chr = (char)10L; return (chr > (byte)10);")); + assertFalse((boolean) exec("def chr = (char)10L; return (chr > (double)(byte)(char)10);")); } // TODO: remove this when the transition from Joda to Java datetimes is completed public void testdefToZonedDateTime() { - assertEquals(0L, exec( - "Instant instant = Instant.ofEpochMilli(434931330000L);" + - "def d = new JodaCompatibleZonedDateTime(instant, ZoneId.of('Z'));" + - "def x = new HashMap(); x.put('dt', d);" + - "ZonedDateTime t = x['dt'];" + - "def y = t;" + - "t = y;" + - "return ChronoUnit.MILLIS.between(d, t);" - )); + assertEquals( + 0L, + exec( + "Instant instant = Instant.ofEpochMilli(434931330000L);" + + "def d = new JodaCompatibleZonedDateTime(instant, ZoneId.of('Z'));" + + "def x = new HashMap(); x.put('dt', d);" + + "ZonedDateTime t = x['dt'];" + + "def y = t;" + + "t = y;" + + "return ChronoUnit.MILLIS.between(d, t);" + ) + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DefOptimizationTests.java 
b/modules/lang-painless/src/test/java/org/opensearch/painless/DefOptimizationTests.java index a1ca3d0a488..7ffaa5c6ef5 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DefOptimizationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DefOptimizationTests.java @@ -187,297 +187,257 @@ public class DefOptimizationTests extends ScriptTestCase { final String script = "int x;\ndef y = new HashMap();\ny['double'] = 1.0;\nx = y.get('double');\n"; assertBytecodeExists(script, "INVOKEDYNAMIC get(Ljava/lang/Object;Ljava/lang/String;)I"); - final Exception exception = expectScriptThrows(ClassCastException.class, () -> { - exec(script); - }); + final Exception exception = expectScriptThrows(ClassCastException.class, () -> { exec(script); }); assertTrue(exception.getMessage().contains("Cannot cast java.lang.Double to java.lang.Integer")); } public void testMulOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x * y", - "INVOKEDYNAMIC mul(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x * y", "INVOKEDYNAMIC mul(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testMulOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x * y", - "INVOKEDYNAMIC mul(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x * y", "INVOKEDYNAMIC mul(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testMulOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x * y", - "INVOKEDYNAMIC mul(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x * y", "INVOKEDYNAMIC mul(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testDivOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x / y", - "INVOKEDYNAMIC div(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x / y", "INVOKEDYNAMIC div(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testDivOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x / y", - "INVOKEDYNAMIC div(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x / y", "INVOKEDYNAMIC div(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testDivOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x / y", - "INVOKEDYNAMIC div(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x / y", "INVOKEDYNAMIC div(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testRemOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x % y", - "INVOKEDYNAMIC rem(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x % y", "INVOKEDYNAMIC rem(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testRemOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x % y", - "INVOKEDYNAMIC rem(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x % y", "INVOKEDYNAMIC rem(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testRemOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x % y", - "INVOKEDYNAMIC rem(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x % y", "INVOKEDYNAMIC rem(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testAddOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x + y", - "INVOKEDYNAMIC 
add(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x + y", "INVOKEDYNAMIC add(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testAddOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x + y", - "INVOKEDYNAMIC add(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x + y", "INVOKEDYNAMIC add(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testAddOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x + y", - "INVOKEDYNAMIC add(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x + y", "INVOKEDYNAMIC add(Ljava/lang/Object;Ljava/lang/Object;)D"); } // horrible, sorry public void testAddOptNullGuards() { // needs null guard - assertBytecodeHasPattern("def x = 1; def y = 2; return x + y", - "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" - + ",\\s+" + DefBootstrap.BINARY_OPERATOR - + ",\\s+" + DefBootstrap.OPERATOR_ALLOWS_NULL + ".*"); + assertBytecodeHasPattern( + "def x = 1; def y = 2; return x + y", + "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + + "\\d+" + + ",\\s+" + + DefBootstrap.BINARY_OPERATOR + + ",\\s+" + + DefBootstrap.OPERATOR_ALLOWS_NULL + + ".*" + ); // still needs null guard, NPE is the wrong thing! - assertBytecodeHasPattern("def x = 1; def y = 2; double z = x + y", - "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" - + ",\\s+" + DefBootstrap.BINARY_OPERATOR - + ",\\s+" + DefBootstrap.OPERATOR_ALLOWS_NULL + ".*"); + assertBytecodeHasPattern( + "def x = 1; def y = 2; double z = x + y", + "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + + "\\d+" + + ",\\s+" + + DefBootstrap.BINARY_OPERATOR + + ",\\s+" + + DefBootstrap.OPERATOR_ALLOWS_NULL + + ".*" + ); // a primitive argument is present: no null guard needed - assertBytecodeHasPattern("def x = 1; int y = 2; return x + y", - "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" - + ",\\s+" + DefBootstrap.BINARY_OPERATOR - + ",\\s+" + 0 + ".*"); - assertBytecodeHasPattern("int x = 1; def y = 2; return x + y", - "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" - + ",\\s+" + DefBootstrap.BINARY_OPERATOR - + ",\\s+" + 0 + ".*"); + assertBytecodeHasPattern( + "def x = 1; int y = 2; return x + y", + "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" + ",\\s+" + DefBootstrap.BINARY_OPERATOR + ",\\s+" + 0 + ".*" + ); + assertBytecodeHasPattern( + "int x = 1; def y = 2; return x + y", + "(?s).*INVOKEDYNAMIC add.*arguments:\\s+" + "\\d+" + ",\\s+" + DefBootstrap.BINARY_OPERATOR + ",\\s+" + 0 + ".*" + ); } public void testSubOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x - y", - "INVOKEDYNAMIC sub(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x - y", "INVOKEDYNAMIC sub(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testSubOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x - y", - "INVOKEDYNAMIC sub(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x - y", "INVOKEDYNAMIC sub(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testSubOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x - y", - "INVOKEDYNAMIC sub(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x - y", "INVOKEDYNAMIC sub(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testLshOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x << y", - "INVOKEDYNAMIC 
lsh(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x << y", "INVOKEDYNAMIC lsh(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testLshOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x << y", - "INVOKEDYNAMIC lsh(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x << y", "INVOKEDYNAMIC lsh(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testLshOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x << y", - "INVOKEDYNAMIC lsh(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x << y", "INVOKEDYNAMIC lsh(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testRshOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x >> y", - "INVOKEDYNAMIC rsh(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x >> y", "INVOKEDYNAMIC rsh(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testRshOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x >> y", - "INVOKEDYNAMIC rsh(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x >> y", "INVOKEDYNAMIC rsh(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testRshOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x >> y", - "INVOKEDYNAMIC rsh(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x >> y", "INVOKEDYNAMIC rsh(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testUshOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x >>> y", - "INVOKEDYNAMIC ush(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x >>> y", "INVOKEDYNAMIC ush(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testUshOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x >>> y", - "INVOKEDYNAMIC ush(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x >>> y", "INVOKEDYNAMIC ush(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testUshOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x >>> y", - "INVOKEDYNAMIC ush(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x >>> y", "INVOKEDYNAMIC ush(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testAndOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x & y", - "INVOKEDYNAMIC and(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x & y", "INVOKEDYNAMIC and(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testAndOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x & y", - "INVOKEDYNAMIC and(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x & y", "INVOKEDYNAMIC and(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testAndOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x & y", - "INVOKEDYNAMIC and(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x & y", "INVOKEDYNAMIC and(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testOrOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x | y", - "INVOKEDYNAMIC or(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x | y", "INVOKEDYNAMIC 
or(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testOrOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x | y", - "INVOKEDYNAMIC or(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x | y", "INVOKEDYNAMIC or(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testOrOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x | y", - "INVOKEDYNAMIC or(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x | y", "INVOKEDYNAMIC or(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testXorOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x ^ y", - "INVOKEDYNAMIC xor(ILjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("int x = 1; def y = 2; return x ^ y", "INVOKEDYNAMIC xor(ILjava/lang/Object;)Ljava/lang/Object;"); } public void testXorOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x ^ y", - "INVOKEDYNAMIC xor(Ljava/lang/Object;I)Ljava/lang/Object;"); + assertBytecodeExists("def x = 1; int y = 2; return x ^ y", "INVOKEDYNAMIC xor(Ljava/lang/Object;I)Ljava/lang/Object;"); } public void testXorOptRet() { - assertBytecodeExists("def x = 1; def y = 2; double d = x ^ y", - "INVOKEDYNAMIC xor(Ljava/lang/Object;Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; def y = 2; double d = x ^ y", "INVOKEDYNAMIC xor(Ljava/lang/Object;Ljava/lang/Object;)D"); } public void testBooleanXorOptLHS() { - assertBytecodeExists("boolean x = true; def y = true; return x ^ y", - "INVOKEDYNAMIC xor(ZLjava/lang/Object;)Ljava/lang/Object;"); + assertBytecodeExists("boolean x = true; def y = true; return x ^ y", "INVOKEDYNAMIC xor(ZLjava/lang/Object;)Ljava/lang/Object;"); } public void testBooleanXorOptRHS() { - assertBytecodeExists("def x = true; boolean y = true; return x ^ y", - "INVOKEDYNAMIC xor(Ljava/lang/Object;Z)Ljava/lang/Object;"); + assertBytecodeExists("def x = true; boolean y = true; return x ^ y", "INVOKEDYNAMIC xor(Ljava/lang/Object;Z)Ljava/lang/Object;"); } public void testBooleanXorOptRet() { - assertBytecodeExists("def x = true; def y = true; boolean v = x ^ y", - "INVOKEDYNAMIC xor(Ljava/lang/Object;Ljava/lang/Object;)Z"); + assertBytecodeExists("def x = true; def y = true; boolean v = x ^ y", "INVOKEDYNAMIC xor(Ljava/lang/Object;Ljava/lang/Object;)Z"); } public void testLtOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x < y", - "INVOKEDYNAMIC lt(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x < y", "INVOKEDYNAMIC lt(ILjava/lang/Object;)Z"); } public void testLtOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x < y", - "INVOKEDYNAMIC lt(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x < y", "INVOKEDYNAMIC lt(Ljava/lang/Object;I)Z"); } public void testLteOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x <= y", - "INVOKEDYNAMIC lte(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x <= y", "INVOKEDYNAMIC lte(ILjava/lang/Object;)Z"); } public void testLteOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x <= y", - "INVOKEDYNAMIC lte(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x <= y", "INVOKEDYNAMIC lte(Ljava/lang/Object;I)Z"); } public void testEqOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x == y", - "INVOKEDYNAMIC eq(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x == y", "INVOKEDYNAMIC 
eq(ILjava/lang/Object;)Z"); } public void testEqOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x == y", - "INVOKEDYNAMIC eq(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x == y", "INVOKEDYNAMIC eq(Ljava/lang/Object;I)Z"); } public void testNeqOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x != y", - "INVOKEDYNAMIC eq(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x != y", "INVOKEDYNAMIC eq(ILjava/lang/Object;)Z"); } public void testNeqOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x != y", - "INVOKEDYNAMIC eq(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x != y", "INVOKEDYNAMIC eq(Ljava/lang/Object;I)Z"); } public void testGteOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x >= y", - "INVOKEDYNAMIC gte(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x >= y", "INVOKEDYNAMIC gte(ILjava/lang/Object;)Z"); } public void testGteOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x >= y", - "INVOKEDYNAMIC gte(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x >= y", "INVOKEDYNAMIC gte(Ljava/lang/Object;I)Z"); } public void testGtOptLHS() { - assertBytecodeExists("int x = 1; def y = 2; return x > y", - "INVOKEDYNAMIC gt(ILjava/lang/Object;)Z"); + assertBytecodeExists("int x = 1; def y = 2; return x > y", "INVOKEDYNAMIC gt(ILjava/lang/Object;)Z"); } public void testGtOptRHS() { - assertBytecodeExists("def x = 1; int y = 2; return x > y", - "INVOKEDYNAMIC gt(Ljava/lang/Object;I)Z"); + assertBytecodeExists("def x = 1; int y = 2; return x > y", "INVOKEDYNAMIC gt(Ljava/lang/Object;I)Z"); } public void testUnaryMinusOptRet() { - assertBytecodeExists("def x = 1; double y = -x; return y", - "INVOKEDYNAMIC neg(Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; double y = -x; return y", "INVOKEDYNAMIC neg(Ljava/lang/Object;)D"); } public void testUnaryNotOptRet() { - assertBytecodeExists("def x = 1; double y = ~x; return y", - "INVOKEDYNAMIC not(Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; double y = ~x; return y", "INVOKEDYNAMIC not(Ljava/lang/Object;)D"); } public void testUnaryPlusOptRet() { - assertBytecodeExists("def x = 1; double y = +x; return y", - "INVOKEDYNAMIC plus(Ljava/lang/Object;)D"); + assertBytecodeExists("def x = 1; double y = +x; return y", "INVOKEDYNAMIC plus(Ljava/lang/Object;)D"); } public void testLambdaReturnType() { - assertBytecodeExists("List l = new ArrayList(); l.removeIf(x -> x < 10)", - "synthetic lambda$synthetic$0(Ljava/lang/Object;)Z"); + assertBytecodeExists("List l = new ArrayList(); l.removeIf(x -> x < 10)", "synthetic lambda$synthetic$0(Ljava/lang/Object;)Z"); } public void testLambdaArguments() { - assertBytecodeExists("List l = new ArrayList(); l.stream().mapToDouble(Double::valueOf).map(x -> x + 1)", - "synthetic lambda$synthetic$0(D)D"); + assertBytecodeExists( + "List l = new ArrayList(); l.stream().mapToDouble(Double::valueOf).map(x -> x + 1)", + "synthetic lambda$synthetic$0(D)D" + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DivisionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DivisionTests.java index 9352c921d82..4d2db1c9a88 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DivisionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DivisionTests.java @@ -33,7 +33,7 @@ package org.opensearch.painless; 
/** Tests for division operator across all types */ -//TODO: NaN/Inf/overflow/... +// TODO: NaN/Inf/overflow/... public class DivisionTests extends ScriptTestCase { // TODO: byte,short,char @@ -44,111 +44,103 @@ public class DivisionTests extends ScriptTestCase { } public void testInt() throws Exception { - assertEquals(1/1, exec("int x = 1; int y = 1; return x/y;")); - assertEquals(2/3, exec("int x = 2; int y = 3; return x/y;")); - assertEquals(5/10, exec("int x = 5; int y = 10; return x/y;")); - assertEquals(10/1/2, exec("int x = 10; int y = 1; int z = 2; return x/y/z;")); - assertEquals((10/1)/2, exec("int x = 10; int y = 1; int z = 2; return (x/y)/z;")); - assertEquals(10/(4/2), exec("int x = 10; int y = 4; int z = 2; return x/(y/z);")); - assertEquals(10/1, exec("int x = 10; int y = 1; return x/y;")); - assertEquals(0/1, exec("int x = 0; int y = 1; return x/y;")); + assertEquals(1 / 1, exec("int x = 1; int y = 1; return x/y;")); + assertEquals(2 / 3, exec("int x = 2; int y = 3; return x/y;")); + assertEquals(5 / 10, exec("int x = 5; int y = 10; return x/y;")); + assertEquals(10 / 1 / 2, exec("int x = 10; int y = 1; int z = 2; return x/y/z;")); + assertEquals((10 / 1) / 2, exec("int x = 10; int y = 1; int z = 2; return (x/y)/z;")); + assertEquals(10 / (4 / 2), exec("int x = 10; int y = 4; int z = 2; return x/(y/z);")); + assertEquals(10 / 1, exec("int x = 10; int y = 1; return x/y;")); + assertEquals(0 / 1, exec("int x = 0; int y = 1; return x/y;")); } public void testIntConst() throws Exception { - assertEquals(1/1, exec("return 1/1;")); - assertEquals(2/3, exec("return 2/3;")); - assertEquals(5/10, exec("return 5/10;")); - assertEquals(10/1/2, exec("return 10/1/2;")); - assertEquals((10/1)/2, exec("return (10/1)/2;")); - assertEquals(10/(4/2), exec("return 10/(4/2);")); - assertEquals(10/1, exec("return 10/1;")); - assertEquals(0/1, exec("return 0/1;")); + assertEquals(1 / 1, exec("return 1/1;")); + assertEquals(2 / 3, exec("return 2/3;")); + assertEquals(5 / 10, exec("return 5/10;")); + assertEquals(10 / 1 / 2, exec("return 10/1/2;")); + assertEquals((10 / 1) / 2, exec("return (10/1)/2;")); + assertEquals(10 / (4 / 2), exec("return 10/(4/2);")); + assertEquals(10 / 1, exec("return 10/1;")); + assertEquals(0 / 1, exec("return 0/1;")); } public void testLong() throws Exception { - assertEquals(1L/1L, exec("long x = 1; long y = 1; return x/y;")); - assertEquals(2L/3L, exec("long x = 2; long y = 3; return x/y;")); - assertEquals(5L/10L, exec("long x = 5; long y = 10; return x/y;")); - assertEquals(10L/1L/2L, exec("long x = 10; long y = 1; long z = 2; return x/y/z;")); - assertEquals((10L/1L)/2L, exec("long x = 10; long y = 1; long z = 2; return (x/y)/z;")); - assertEquals(10L/(4L/2L), exec("long x = 10; long y = 4; long z = 2; return x/(y/z);")); - assertEquals(10L/1L, exec("long x = 10; long y = 1; return x/y;")); - assertEquals(0L/1L, exec("long x = 0; long y = 1; return x/y;")); + assertEquals(1L / 1L, exec("long x = 1; long y = 1; return x/y;")); + assertEquals(2L / 3L, exec("long x = 2; long y = 3; return x/y;")); + assertEquals(5L / 10L, exec("long x = 5; long y = 10; return x/y;")); + assertEquals(10L / 1L / 2L, exec("long x = 10; long y = 1; long z = 2; return x/y/z;")); + assertEquals((10L / 1L) / 2L, exec("long x = 10; long y = 1; long z = 2; return (x/y)/z;")); + assertEquals(10L / (4L / 2L), exec("long x = 10; long y = 4; long z = 2; return x/(y/z);")); + assertEquals(10L / 1L, exec("long x = 10; long y = 1; return x/y;")); + assertEquals(0L / 1L, exec("long x = 0; long y = 
1; return x/y;")); } public void testLongConst() throws Exception { - assertEquals(1L/1L, exec("return 1L/1L;")); - assertEquals(2L/3L, exec("return 2L/3L;")); - assertEquals(5L/10L, exec("return 5L/10L;")); - assertEquals(10L/1L/2L, exec("return 10L/1L/2L;")); - assertEquals((10L/1L)/2L, exec("return (10L/1L)/2L;")); - assertEquals(10L/(4L/2L), exec("return 10L/(4L/2L);")); - assertEquals(10L/1L, exec("return 10L/1L;")); - assertEquals(0L/1L, exec("return 0L/1L;")); + assertEquals(1L / 1L, exec("return 1L/1L;")); + assertEquals(2L / 3L, exec("return 2L/3L;")); + assertEquals(5L / 10L, exec("return 5L/10L;")); + assertEquals(10L / 1L / 2L, exec("return 10L/1L/2L;")); + assertEquals((10L / 1L) / 2L, exec("return (10L/1L)/2L;")); + assertEquals(10L / (4L / 2L), exec("return 10L/(4L/2L);")); + assertEquals(10L / 1L, exec("return 10L/1L;")); + assertEquals(0L / 1L, exec("return 0L/1L;")); } public void testFloat() throws Exception { - assertEquals(1F/1F, exec("float x = 1; float y = 1; return x/y;")); - assertEquals(2F/3F, exec("float x = 2; float y = 3; return x/y;")); - assertEquals(5F/10F, exec("float x = 5; float y = 10; return x/y;")); - assertEquals(10F/1F/2F, exec("float x = 10; float y = 1; float z = 2; return x/y/z;")); - assertEquals((10F/1F)/2F, exec("float x = 10; float y = 1; float z = 2; return (x/y)/z;")); - assertEquals(10F/(4F/2F), exec("float x = 10; float y = 4; float z = 2; return x/(y/z);")); - assertEquals(10F/1F, exec("float x = 10; float y = 1; return x/y;")); - assertEquals(0F/1F, exec("float x = 0; float y = 1; return x/y;")); + assertEquals(1F / 1F, exec("float x = 1; float y = 1; return x/y;")); + assertEquals(2F / 3F, exec("float x = 2; float y = 3; return x/y;")); + assertEquals(5F / 10F, exec("float x = 5; float y = 10; return x/y;")); + assertEquals(10F / 1F / 2F, exec("float x = 10; float y = 1; float z = 2; return x/y/z;")); + assertEquals((10F / 1F) / 2F, exec("float x = 10; float y = 1; float z = 2; return (x/y)/z;")); + assertEquals(10F / (4F / 2F), exec("float x = 10; float y = 4; float z = 2; return x/(y/z);")); + assertEquals(10F / 1F, exec("float x = 10; float y = 1; return x/y;")); + assertEquals(0F / 1F, exec("float x = 0; float y = 1; return x/y;")); } public void testFloatConst() throws Exception { - assertEquals(1F/1F, exec("return 1F/1F;")); - assertEquals(2F/3F, exec("return 2F/3F;")); - assertEquals(5F/10F, exec("return 5F/10F;")); - assertEquals(10F/1F/2F, exec("return 10F/1F/2F;")); - assertEquals((10F/1F)/2F, exec("return (10F/1F)/2F;")); - assertEquals(10F/(4F/2F), exec("return 10F/(4F/2F);")); - assertEquals(10F/1F, exec("return 10F/1F;")); - assertEquals(0F/1F, exec("return 0F/1F;")); + assertEquals(1F / 1F, exec("return 1F/1F;")); + assertEquals(2F / 3F, exec("return 2F/3F;")); + assertEquals(5F / 10F, exec("return 5F/10F;")); + assertEquals(10F / 1F / 2F, exec("return 10F/1F/2F;")); + assertEquals((10F / 1F) / 2F, exec("return (10F/1F)/2F;")); + assertEquals(10F / (4F / 2F), exec("return 10F/(4F/2F);")); + assertEquals(10F / 1F, exec("return 10F/1F;")); + assertEquals(0F / 1F, exec("return 0F/1F;")); } public void testDouble() throws Exception { - assertEquals(1.0/1.0, exec("double x = 1; double y = 1; return x/y;")); - assertEquals(2.0/3.0, exec("double x = 2; double y = 3; return x/y;")); - assertEquals(5.0/10.0, exec("double x = 5; double y = 10; return x/y;")); - assertEquals(10.0/1.0/2.0, exec("double x = 10; double y = 1; double z = 2; return x/y/z;")); - assertEquals((10.0/1.0)/2.0, exec("double x = 10; double y = 1; double z = 2; 
return (x/y)/z;")); - assertEquals(10.0/(4.0/2.0), exec("double x = 10; double y = 4; double z = 2; return x/(y/z);")); - assertEquals(10.0/1.0, exec("double x = 10; double y = 1; return x/y;")); - assertEquals(0.0/1.0, exec("double x = 0; double y = 1; return x/y;")); + assertEquals(1.0 / 1.0, exec("double x = 1; double y = 1; return x/y;")); + assertEquals(2.0 / 3.0, exec("double x = 2; double y = 3; return x/y;")); + assertEquals(5.0 / 10.0, exec("double x = 5; double y = 10; return x/y;")); + assertEquals(10.0 / 1.0 / 2.0, exec("double x = 10; double y = 1; double z = 2; return x/y/z;")); + assertEquals((10.0 / 1.0) / 2.0, exec("double x = 10; double y = 1; double z = 2; return (x/y)/z;")); + assertEquals(10.0 / (4.0 / 2.0), exec("double x = 10; double y = 4; double z = 2; return x/(y/z);")); + assertEquals(10.0 / 1.0, exec("double x = 10; double y = 1; return x/y;")); + assertEquals(0.0 / 1.0, exec("double x = 0; double y = 1; return x/y;")); } public void testDoubleConst() throws Exception { - assertEquals(1.0/1.0, exec("return 1.0/1.0;")); - assertEquals(2.0/3.0, exec("return 2.0/3.0;")); - assertEquals(5.0/10.0, exec("return 5.0/10.0;")); - assertEquals(10.0/1.0/2.0, exec("return 10.0/1.0/2.0;")); - assertEquals((10.0/1.0)/2.0, exec("return (10.0/1.0)/2.0;")); - assertEquals(10.0/(4.0/2.0), exec("return 10.0/(4.0/2.0);")); - assertEquals(10.0/1.0, exec("return 10.0/1.0;")); - assertEquals(0.0/1.0, exec("return 0.0/1.0;")); + assertEquals(1.0 / 1.0, exec("return 1.0/1.0;")); + assertEquals(2.0 / 3.0, exec("return 2.0/3.0;")); + assertEquals(5.0 / 10.0, exec("return 5.0/10.0;")); + assertEquals(10.0 / 1.0 / 2.0, exec("return 10.0/1.0/2.0;")); + assertEquals((10.0 / 1.0) / 2.0, exec("return (10.0/1.0)/2.0;")); + assertEquals(10.0 / (4.0 / 2.0), exec("return 10.0/(4.0/2.0);")); + assertEquals(10.0 / 1.0, exec("return 10.0/1.0;")); + assertEquals(0.0 / 1.0, exec("return 0.0/1.0;")); } public void testDivideByZero() throws Exception { - expectScriptThrows(ArithmeticException.class, () -> { - exec("int x = 1; int y = 0; return x / y;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("int x = 1; int y = 0; return x / y;"); }); - expectScriptThrows(ArithmeticException.class, () -> { - exec("long x = 1L; long y = 0L; return x / y;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("long x = 1L; long y = 0L; return x / y;"); }); } public void testDivideByZeroConst() throws Exception { - expectScriptThrows(ArithmeticException.class, () -> { - exec("return 1/0;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("return 1/0;"); }); - expectScriptThrows(ArithmeticException.class, () -> { - exec("return 1L/0L;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("return 1L/0L;"); }); } public void testDef() { @@ -397,33 +389,21 @@ public class DivisionTests extends ScriptTestCase { public void testCompoundAssignmentByZero() { // byte - expectScriptThrows(ArithmeticException.class, () -> { - exec("byte x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("byte x = 1; x /= 0; return x;"); }); // short - expectScriptThrows(ArithmeticException.class, () -> { - exec("short x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("short x = 1; x /= 0; return x;"); }); // char - expectScriptThrows(ArithmeticException.class, () -> { - exec("char x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("char x = 1; x 
/= 0; return x;"); }); // int - expectScriptThrows(ArithmeticException.class, () -> { - exec("int x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("int x = 1; x /= 0; return x;"); }); // long - expectScriptThrows(ArithmeticException.class, () -> { - exec("long x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("long x = 1; x /= 0; return x;"); }); // def - expectScriptThrows(ArithmeticException.class, () -> { - exec("def x = 1; x /= 0; return x;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("def x = 1; x /= 0; return x;"); }); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java index e2a6e830d39..6a6634a823e 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java @@ -52,7 +52,8 @@ public class DocFieldsPhaseTests extends ScriptTestCase { Compiler compiler = new Compiler( MockDocTestScript.CONTEXT.instanceClazz, MockDocTestScript.CONTEXT.factoryClazz, - MockDocTestScript.CONTEXT.statefulFactoryClazz, lookup + MockDocTestScript.CONTEXT.statefulFactoryClazz, + lookup ); // Create our loader (which loads compiled code with no permissions). @@ -63,19 +64,19 @@ public class DocFieldsPhaseTests extends ScriptTestCase { } }); - return compiler.compile(loader,"test", script, new CompilerSettings()); + return compiler.compile(loader, "test", script, new CompilerSettings()); } public abstract static class MockDocTestScript { - public static final String[] PARAMETERS = {"doc", "other"}; + public static final String[] PARAMETERS = { "doc", "other" }; + public abstract void execute(Map doc, Map other); public interface Factory { MockDocTestScript newInstance(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", MockDocTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("test", MockDocTestScript.Factory.class); } public void testArray() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ElvisTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ElvisTests.java index 81126e242a7..9c93e2310bc 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ElvisTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ElvisTests.java @@ -77,7 +77,7 @@ public class ElvisTests extends ScriptTestCase { assertEquals(2, exec("return params.a + 1 ?: 2 + 2", singletonMap("a", 1), true)); // Yes, this is silly, but it should be valid // Weird casts - assertEquals(1, exec("int i = params.i; String s = params.s; return s ?: i", singletonMap("i", 1), true)); + assertEquals(1, exec("int i = params.i; String s = params.s; return s ?: i", singletonMap("i", 1), true)); assertEquals("str", exec("Integer i = params.i; String s = params.s; return s ?: i", singletonMap("s", "str"), true)); // Combining @@ -98,8 +98,10 @@ public class ElvisTests extends ScriptTestCase { public void testLazy() { assertEquals(1, exec("def fail() {throw new RuntimeException('test')} return params.a ?: fail()", singletonMap("a", 1), true)); - Exception e = expectScriptThrows(RuntimeException.class, () -> - exec("def fail() {throw new RuntimeException('test')} return params.a ?: fail()")); + Exception e = expectScriptThrows( + 
RuntimeException.class, + () -> exec("def fail() {throw new RuntimeException('test')} return params.a ?: fail()") + ); assertEquals(e.getMessage(), "test"); } @@ -121,8 +123,10 @@ public class ElvisTests extends ScriptTestCase { assertThat(disassembled, firstLookup, greaterThan(-1)); int firstElvisDestinationLabelIndex = disassembled.indexOf("IFNONNULL L", firstLookup); assertThat(disassembled, firstElvisDestinationLabelIndex, greaterThan(-1)); - String firstElvisDestinationLabel = disassembled.substring(firstElvisDestinationLabelIndex + "IFNONNULL ".length(), - disassembled.indexOf('\n', firstElvisDestinationLabelIndex)); + String firstElvisDestinationLabel = disassembled.substring( + firstElvisDestinationLabelIndex + "IFNONNULL ".length(), + disassembled.indexOf('\n', firstElvisDestinationLabelIndex) + ); int firstElvisDestionation = disassembled.indexOf(" " + firstElvisDestinationLabel); assertThat(disassembled, firstElvisDestionation, greaterThan(-1)); int ifAfterFirstElvisDestination = disassembled.indexOf("IF", firstElvisDestionation); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/EqualsTests.java index e299fcee712..b99ecc190bb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/EqualsTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/EqualsTests.java @@ -180,18 +180,18 @@ public class EqualsTests extends ScriptTestCase { * we can never be sure that the JVM hasn't configured itself to cache that Integer. It is sneaky like that. */ int uncachedAutoboxedInt = randomValueOtherThanMany(i -> Integer.valueOf(i) == Integer.valueOf(i), OpenSearchTestCase::randomInt); assertEquals(false, exec("def x = params.i; int y = params.i; return x != y;", singletonMap("i", uncachedAutoboxedInt), true)); - assertEquals(true, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", uncachedAutoboxedInt), true)); + assertEquals(true, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", uncachedAutoboxedInt), true)); assertEquals(false, exec("def x = params.i; int y = params.i; return y != x;", singletonMap("i", uncachedAutoboxedInt), true)); - assertEquals(true, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", uncachedAutoboxedInt), true)); + assertEquals(true, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", uncachedAutoboxedInt), true)); /* Now check that we use valueOf with the boxing used for comparing primitives to def. For this we need an * integer that is cached by Integer.valueOf. The JLS says 0 should always be cached. 
*/ int cachedAutoboxedInt = 0; assertSame(Integer.valueOf(cachedAutoboxedInt), Integer.valueOf(cachedAutoboxedInt)); assertEquals(false, exec("def x = params.i; int y = params.i; return x != y;", singletonMap("i", cachedAutoboxedInt), true)); - assertEquals(false, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", cachedAutoboxedInt), true)); + assertEquals(false, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", cachedAutoboxedInt), true)); assertEquals(false, exec("def x = params.i; int y = params.i; return y != x;", singletonMap("i", cachedAutoboxedInt), true)); - assertEquals(false, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", cachedAutoboxedInt), true)); + assertEquals(false, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", cachedAutoboxedInt), true)); } public void testRightHandNull() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FactoryTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FactoryTests.java index fbe6d488bcb..7ee0bd6fc09 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FactoryTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FactoryTests.java @@ -82,8 +82,8 @@ public class FactoryTests extends ScriptTestCase { private final int y; public StatefulFactoryTestScript(int x, int y, int a, int b) { - this.x = x*a; - this.y = y*b; + this.x = x * a; + this.y = y * b; } public int getX() { @@ -91,7 +91,7 @@ public class FactoryTests extends ScriptTestCase { } public int getY() { - return y*2; + return y * 2; } public int getC() { @@ -102,22 +102,31 @@ public class FactoryTests extends ScriptTestCase { return 2; } - public static final String[] PARAMETERS = new String[] {"test"}; + public static final String[] PARAMETERS = new String[] { "test" }; + public abstract Object execute(int test); public abstract boolean needsTest(); + public abstract boolean needsNothing(); + public abstract boolean needsX(); + public abstract boolean needsC(); + public abstract boolean needsD(); public interface StatefulFactory { StatefulFactoryTestScript newInstance(int a, int b); boolean needsTest(); + boolean needsNothing(); + boolean needsX(); + boolean needsC(); + boolean needsD(); } @@ -125,19 +134,29 @@ public class FactoryTests extends ScriptTestCase { StatefulFactory newFactory(int x, int y); boolean needsTest(); + boolean needsNothing(); + boolean needsX(); + boolean needsC(); + boolean needsD(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", StatefulFactoryTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>( + "test", + StatefulFactoryTestScript.Factory.class + ); } public void testStatefulFactory() { StatefulFactoryTestScript.Factory factory = getEngine().compile( - "stateful_factory_test", "test + x + y + d", StatefulFactoryTestScript.CONTEXT, Collections.emptyMap()); + "stateful_factory_test", + "test + x + y + d", + StatefulFactoryTestScript.CONTEXT, + Collections.emptyMap() + ); StatefulFactoryTestScript.StatefulFactory statefulFactory = factory.newFactory(1, 2); StatefulFactoryTestScript script = statefulFactory.newInstance(3, 4); assertEquals(24, script.execute(3)); @@ -171,18 +190,19 @@ public class FactoryTests extends ScriptTestCase { return params; } - public static final String[] PARAMETERS = new String[] {"test"}; + public static final String[] PARAMETERS = new String[] { "test" }; + public 
abstract Object execute(int test); public interface Factory { FactoryTestScript newInstance(Map params); boolean needsTest(); + boolean needsNothing(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", FactoryTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("test", FactoryTestScript.Factory.class); } public abstract static class DeterministicFactoryTestScript { @@ -196,23 +216,31 @@ public class FactoryTests extends ScriptTestCase { return params; } - public static final String[] PARAMETERS = new String[] {"test"}; + public static final String[] PARAMETERS = new String[] { "test" }; + public abstract Object execute(int test); - public interface Factory extends ScriptFactory{ + public interface Factory extends ScriptFactory { FactoryTestScript newInstance(Map params); boolean needsTest(); + boolean needsNothing(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", DeterministicFactoryTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>( + "test", + DeterministicFactoryTestScript.Factory.class + ); } public void testFactory() { - FactoryTestScript.Factory factory = - getEngine().compile("factory_test", "test + params.get('test')", FactoryTestScript.CONTEXT, Collections.emptyMap()); + FactoryTestScript.Factory factory = getEngine().compile( + "factory_test", + "test + params.get('test')", + FactoryTestScript.CONTEXT, + Collections.emptyMap() + ); FactoryTestScript script = factory.newInstance(Collections.singletonMap("test", 2)); assertEquals(4, script.execute(2)); assertEquals(5, script.execute(3)); @@ -226,41 +254,50 @@ public class FactoryTests extends ScriptTestCase { } public void testDeterministic() { - DeterministicFactoryTestScript.Factory factory = - getEngine().compile("deterministic_test", "Integer.parseInt('123')", - DeterministicFactoryTestScript.CONTEXT, Collections.emptyMap()); + DeterministicFactoryTestScript.Factory factory = getEngine().compile( + "deterministic_test", + "Integer.parseInt('123')", + DeterministicFactoryTestScript.CONTEXT, + Collections.emptyMap() + ); assertTrue(factory.isResultDeterministic()); assertEquals(123, factory.newInstance(Collections.emptyMap()).execute(0)); } public void testNotDeterministic() { - DeterministicFactoryTestScript.Factory factory = - getEngine().compile("not_deterministic_test", "Math.random()", - DeterministicFactoryTestScript.CONTEXT, Collections.emptyMap()); + DeterministicFactoryTestScript.Factory factory = getEngine().compile( + "not_deterministic_test", + "Math.random()", + DeterministicFactoryTestScript.CONTEXT, + Collections.emptyMap() + ); assertFalse(factory.isResultDeterministic()); - Double d = (Double)factory.newInstance(Collections.emptyMap()).execute(0); + Double d = (Double) factory.newInstance(Collections.emptyMap()).execute(0); assertTrue(d >= 0.0 && d <= 1.0); } public void testMixedDeterministicIsNotDeterministic() { - DeterministicFactoryTestScript.Factory factory = - getEngine().compile("not_deterministic_test", "Integer.parseInt('123') + Math.random()", - DeterministicFactoryTestScript.CONTEXT, Collections.emptyMap()); + DeterministicFactoryTestScript.Factory factory = getEngine().compile( + "not_deterministic_test", + "Integer.parseInt('123') + Math.random()", + DeterministicFactoryTestScript.CONTEXT, + Collections.emptyMap() + ); assertFalse(factory.isResultDeterministic()); - Double d = (Double)factory.newInstance(Collections.emptyMap()).execute(0); + Double d = (Double) 
factory.newInstance(Collections.emptyMap()).execute(0); assertTrue(d >= 123.0 && d <= 124.0); } public abstract static class EmptyTestScript { public static final String[] PARAMETERS = {}; + public abstract Object execute(); public interface Factory { EmptyTestScript newInstance(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", EmptyTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("test", EmptyTestScript.Factory.class); } public void testEmpty() { @@ -274,8 +311,12 @@ public class FactoryTests extends ScriptTestCase { } public void testTemplate() { - TemplateScript.Factory factory = - getEngine().compile("template_test", "params['test']", TemplateScript.CONTEXT, Collections.emptyMap()); + TemplateScript.Factory factory = getEngine().compile( + "template_test", + "params['test']", + TemplateScript.CONTEXT, + Collections.emptyMap() + ); TemplateScript script = factory.newInstance(Collections.singletonMap("test", "abc")); assertEquals("abc", script.execute()); assertEquals("abc", script.execute()); @@ -285,30 +326,37 @@ public class FactoryTests extends ScriptTestCase { } public void testGetterInLambda() { - FactoryTestScript.Factory factory = - getEngine().compile("template_test", - "IntSupplier createLambda(IntSupplier s) { return s; } createLambda(() -> params['x'] + test).getAsInt()", - FactoryTestScript.CONTEXT, Collections.emptyMap()); + FactoryTestScript.Factory factory = getEngine().compile( + "template_test", + "IntSupplier createLambda(IntSupplier s) { return s; } createLambda(() -> params['x'] + test).getAsInt()", + FactoryTestScript.CONTEXT, + Collections.emptyMap() + ); FactoryTestScript script = factory.newInstance(Collections.singletonMap("x", 1)); assertEquals(2, script.execute(1)); } public abstract static class VoidReturnTestScript { - public static final String[] PARAMETERS = {"map"}; + public static final String[] PARAMETERS = { "map" }; + public abstract void execute(Map map); public interface Factory { VoidReturnTestScript newInstance(); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", VoidReturnTestScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>( + "test", + VoidReturnTestScript.Factory.class + ); } public void testVoidReturn() { getEngine().compile("void_return_test", "int x = 1 + 1; return;", VoidReturnTestScript.CONTEXT, Collections.emptyMap()); - IllegalArgumentException iae = expectScriptThrows(IllegalArgumentException.class, () -> - getEngine().compile("void_return_test", "1 + 1", VoidReturnTestScript.CONTEXT, Collections.emptyMap())); + IllegalArgumentException iae = expectScriptThrows( + IllegalArgumentException.class, + () -> getEngine().compile("void_return_test", "1 + 1", VoidReturnTestScript.CONTEXT, Collections.emptyMap()) + ); assertEquals(iae.getMessage(), "not a statement: result not used from addition operation [+]"); } @@ -323,27 +371,30 @@ public class FactoryTests extends ScriptTestCase { return params; } - public static final String[] PARAMETERS = new String[] {"test"}; + public static final String[] PARAMETERS = new String[] { "test" }; + public abstract long[] execute(int test); public interface Factory { FactoryTestConverterScript newInstance(Map params); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", FactoryTestConverterScript.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>( + "test", + FactoryTestConverterScript.Factory.class + ); public 
static long[] convertFromInt(int i) { - return new long[]{i}; + return new long[] { i }; } public static long[] convertFromString(String s) { - return new long[]{Long.parseLong(s)}; + return new long[] { Long.parseLong(s) }; } public static long[] convertFromList(List l) { long[] converted = new long[l.size()]; - for (int i=0; i < l.size(); i++) { + for (int i = 0; i < l.size(); i++) { Object o = l.get(i); if (o instanceof Long) { converted[i] = (Long) o; @@ -358,7 +409,7 @@ public class FactoryTests extends ScriptTestCase { public static long[] convertFromDef(Object def) { if (def instanceof String) { - return convertFromString((String)def); + return convertFromString((String) def); } else if (def instanceof Integer) { return convertFromInt(((Integer) def).intValue()); } else if (def instanceof List) { @@ -366,120 +417,126 @@ public class FactoryTests extends ScriptTestCase { } else { return (long[]) def; } - //throw new ClassCastException("Cannot convert [" + def + "] to long[]"); + // throw new ClassCastException("Cannot convert [" + def + "] to long[]"); } } - public void testConverterFactory() { - FactoryTestConverterScript.Factory factory = - getEngine().compile("converter_test", - "return test;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.Factory factory = getEngine().compile( + "converter_test", + "return test;", + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); FactoryTestConverterScript script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{2}, script.execute(2)); + assertArrayEquals(new long[] { 2 }, script.execute(2)); script = factory.newInstance(Collections.singletonMap("test", 3)); - assertArrayEquals(new long[]{3}, script.execute(3)); + assertArrayEquals(new long[] { 3 }, script.execute(3)); - factory = getEngine().compile("converter_test", - "return test + 1;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "return test + 1;", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1001}, script.execute(1000)); + assertArrayEquals(new long[] { 1001 }, script.execute(1000)); - factory = getEngine().compile("converter_test", - "return '100';", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "return '100';", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{100}, script.execute(1000)); + assertArrayEquals(new long[] { 100 }, script.execute(1000)); - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "long[] a = new long[]{test, 123}; return a;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1000, 123}, script.execute(1000)); + assertArrayEquals(new long[] { 1000, 123 }, script.execute(1000)); - factory = getEngine().compile("converter_test", - "return [test, 123];", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "return [test, 123];", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = 
factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1000, 123}, script.execute(1000)); + assertArrayEquals(new long[] { 1000, 123 }, script.execute(1000)); - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "ArrayList a = new ArrayList(); a.add(test); a.add(456); a.add('789'); return a;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{123, 456, 789}, script.execute(123)); + assertArrayEquals(new long[] { 123, 456, 789 }, script.execute(123)); // autoreturn, no converter - factory = getEngine().compile("converter_test", - "new long[]{test}", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "new long[]{test}", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{123}, script.execute(123)); + assertArrayEquals(new long[] { 123 }, script.execute(123)); // autoreturn, converter - factory = getEngine().compile("converter_test", - "test", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "test", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{456}, script.execute(456)); + assertArrayEquals(new long[] { 456 }, script.execute(456)); - factory = getEngine().compile("converter_test", - "'1001'", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "'1001'", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1001}, script.execute(456)); + assertArrayEquals(new long[] { 1001 }, script.execute(456)); // def tests - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "def a = new long[]{test, 123}; return a;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1000, 123}, script.execute(1000)); + assertArrayEquals(new long[] { 1000, 123 }, script.execute(1000)); - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "def l = [test, 123]; l;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1000, 123}, script.execute(1000)); + assertArrayEquals(new long[] { 1000, 123 }, script.execute(1000)); - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "def a = new ArrayList(); a.add(test); a.add(456); a.add('789'); return a;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{123, 456, 789}, script.execute(123)); + assertArrayEquals(new long[] { 123, 456, 789 }, 
script.execute(123)); // autoreturn, no converter - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "def a = new long[]{test}; a;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{123}, script.execute(123)); + assertArrayEquals(new long[] { 123 }, script.execute(123)); // autoreturn, converter - factory = getEngine().compile("converter_test", - "def a = '1001'; a", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "def a = '1001'; a", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1001}, script.execute(456)); + assertArrayEquals(new long[] { 1001 }, script.execute(456)); - factory = getEngine().compile("converter_test", - "int x = 1", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + factory = getEngine().compile("converter_test", "int x = 1", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); script = factory.newInstance(Collections.singletonMap("test", 2)); assertArrayEquals(null, script.execute(123)); - factory = getEngine().compile("converter_test", + factory = getEngine().compile( + "converter_test", "short x = 1; return x", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap()); + FactoryTestConverterScript.CONTEXT, + Collections.emptyMap() + ); script = factory.newInstance(Collections.singletonMap("test", 2)); - assertArrayEquals(new long[]{1}, script.execute(123)); + assertArrayEquals(new long[] { 1 }, script.execute(123)); - ClassCastException cce = expectScriptThrows(ClassCastException.class, () -> - getEngine().compile("converter_test", - "return true;", - FactoryTestConverterScript.CONTEXT, Collections.emptyMap())); + ClassCastException cce = expectScriptThrows( + ClassCastException.class, + () -> getEngine().compile("converter_test", "return true;", FactoryTestConverterScript.CONTEXT, Collections.emptyMap()) + ); assertEquals(cce.getMessage(), "Cannot cast from [boolean] to [long[]]."); } @@ -494,27 +551,28 @@ public class FactoryTests extends ScriptTestCase { return params; } - public static final String[] PARAMETERS = new String[] {"test"}; + public static final String[] PARAMETERS = new String[] { "test" }; + public abstract long[] execute(int test); public interface Factory { FactoryTestConverterScriptBadDef newInstance(Map params); } - public static final ScriptContext CONTEXT = - new ScriptContext<>("test", FactoryTestConverterScriptBadDef.Factory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>( + "test", + FactoryTestConverterScriptBadDef.Factory.class + ); public static long[] convertFromDef(int def) { - return new long[]{def}; + return new long[] { def }; } } public void testConverterFactoryBadDef() { IllegalStateException ise = null; try { - getEngine().compile("converter_def", - "return test;", - FactoryTestConverterScriptBadDef.CONTEXT, Collections.emptyMap()); + getEngine().compile("converter_def", "return test;", FactoryTestConverterScriptBadDef.CONTEXT, Collections.emptyMap()); } catch (ScriptException e) { ise = (IllegalStateException) e.getCause(); } @@ -547,8 +605,12 @@ public class FactoryTests extends ScriptTestCase { } public void testDocFields() { - DocFieldsTestScript.Factory f = - 
getEngine().compile("test", "doc['cat'] + doc['dog']", DocFieldsTestScript.CONTEXT, Collections.emptyMap()); + DocFieldsTestScript.Factory f = getEngine().compile( + "test", + "doc['cat'] + doc['dog']", + DocFieldsTestScript.CONTEXT, + Collections.emptyMap() + ); assertThat(f.docFields(), equalTo(Arrays.asList("cat", "dog"))); assertThat(f.newInstance().execute(), equalTo("meowwoof")); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestAugmentationObject.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestAugmentationObject.java index 40db216cdfe..8af9de8d4cd 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestAugmentationObject.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestAugmentationObject.java @@ -53,16 +53,23 @@ public class FeatureTestAugmentationObject { } public static int augmentInjectWithLambda(FeatureTestObject ft, int injected, Function fn, short arg) { - return ft.getX()*fn.apply(arg)*injected; + return ft.getX() * fn.apply(arg) * injected; } public static int augmentInjectMultiTimesX(FeatureTestObject ft, int inject1, int inject2, short user) { return ft.getX() * (inject1 + inject2) * user; } - public static int augmentInjectMultiWithLambda(FeatureTestObject ft, - int inject1, int inject2, int inject3, int inject4, Function fn, short arg) { - return ft.getX()*fn.apply(arg)*(inject1 + inject2 + inject3 + inject4); + public static int augmentInjectMultiWithLambda( + FeatureTestObject ft, + int inject1, + int inject2, + int inject3, + int inject4, + Function fn, + short arg + ) { + return ft.getX() * fn.apply(arg) * (inject1 + inject2 + inject3 + inject4); } private FeatureTestAugmentationObject() {} diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java index 4796cf1b0d0..1bc6597b584 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject.java @@ -68,8 +68,7 @@ public class FeatureTestObject { private Integer i; /** empty ctor */ - public FeatureTestObject() { - } + public FeatureTestObject() {} /** ctor with params */ public FeatureTestObject(int x, int y) { @@ -116,7 +115,7 @@ public class FeatureTestObject { } public int injectWithLambda(int injected, Function fn, short arg) { - return this.x*fn.apply(arg)*injected; + return this.x * fn.apply(arg) * injected; } public int injectMultiTimesX(int inject1, int inject2, int inject3, short user) { @@ -124,15 +123,15 @@ public class FeatureTestObject { } public int injectMultiWithLambda(int inject1, int inject2, int inject3, Function fn, short arg) { - return this.x*fn.apply(arg)*(inject1 + inject2 + inject3); + return this.x * fn.apply(arg) * (inject1 + inject2 + inject3); } public Double mixedAdd(int i, Byte b, char c, Float f) { - return (double)(i + b + c + f); + return (double) (i + b + c + f); } /** method taking two functions! 
*/ - public Object twoFunctionsOfX(Function f, Function g) { + public Object twoFunctionsOfX(Function f, Function g) { return f.apply(g.apply(x)); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java index 6caca2bc8fc..9fb0610bc94 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FeatureTestObject2.java @@ -34,10 +34,14 @@ package org.opensearch.painless; /** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ public class FeatureTestObject2 { - public FeatureTestObject2() {super();} + public FeatureTestObject2() { + super(); + } + public static int staticNumberArgument(int injected, int userArgument) { return injected * userArgument; } + public static int staticNumberArgument2(int userArgument1, int userArgument2) { return userArgument1 * userArgument2; } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FloatOverflowTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FloatOverflowTests.java index 743976013f3..6d28635f144 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FloatOverflowTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FloatOverflowTests.java @@ -89,8 +89,10 @@ public class FloatOverflowTests extends ScriptTestCase { public void testSubtraction() throws Exception { assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); - assertEquals(Double.NEGATIVE_INFINITY, - exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;")); + assertEquals( + Double.NEGATIVE_INFINITY, + exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;") + ); } public void testSubtractionConst() throws Exception { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionRefTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionRefTests.java index 2407ce52af2..2ed1fa49d0b 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionRefTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionRefTests.java @@ -58,68 +58,102 @@ public class FunctionRefTests extends ScriptTestCase { } public void testQualifiedStaticMethodReference() { - assertEquals(true, - exec("List l = [true]; l.stream().map(org.opensearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()")); + assertEquals( + true, + exec("List l = [true]; l.stream().map(org.opensearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()") + ); } public void testQualifiedStaticMethodReferenceDef() { - assertEquals(true, - exec("def l = [true]; l.stream().map(org.opensearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()")); + assertEquals( + true, + exec("def l = [true]; l.stream().map(org.opensearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()") + ); } public void testQualifiedVirtualMethodReference() { long instant = randomLong(); - assertEquals(instant, exec( + assertEquals( + instant, + exec( "List l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()", - singletonMap("d", Instant.ofEpochMilli(instant)), true)); + singletonMap("d", Instant.ofEpochMilli(instant)), + 
true + ) + ); } public void testQualifiedVirtualMethodReferenceDef() { long instant = randomLong(); - assertEquals(instant, exec( + assertEquals( + instant, + exec( "def l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()", - singletonMap("d", Instant.ofEpochMilli(instant)), true)); + singletonMap("d", Instant.ofEpochMilli(instant)), + true + ) + ); } public void testCtorMethodReference() { - assertEquals(3.0D, - exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " + - "DoubleStream doubleStream = l.stream().mapToDouble(Double::doubleValue);" + - "DoubleSummaryStatistics stats = doubleStream.collect(DoubleSummaryStatistics::new, " + - "DoubleSummaryStatistics::accept, " + - "DoubleSummaryStatistics::combine); " + - "return stats.getSum()")); + assertEquals( + 3.0D, + exec( + "List l = new ArrayList(); l.add(1.0); l.add(2.0); " + + "DoubleStream doubleStream = l.stream().mapToDouble(Double::doubleValue);" + + "DoubleSummaryStatistics stats = doubleStream.collect(DoubleSummaryStatistics::new, " + + "DoubleSummaryStatistics::accept, " + + "DoubleSummaryStatistics::combine); " + + "return stats.getSum()" + ) + ); } public void testCtorMethodReferenceDef() { - assertEquals(3.0D, - exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " + - "def doubleStream = l.stream().mapToDouble(Double::doubleValue);" + - "def stats = doubleStream.collect(DoubleSummaryStatistics::new, " + - "DoubleSummaryStatistics::accept, " + - "DoubleSummaryStatistics::combine); " + - "return stats.getSum()")); + assertEquals( + 3.0D, + exec( + "def l = new ArrayList(); l.add(1.0); l.add(2.0); " + + "def doubleStream = l.stream().mapToDouble(Double::doubleValue);" + + "def stats = doubleStream.collect(DoubleSummaryStatistics::new, " + + "DoubleSummaryStatistics::accept, " + + "DoubleSummaryStatistics::combine); " + + "return stats.getSum()" + ) + ); } public void testCtorWithParams() { - assertArrayEquals(new Object[] { "foo", "bar" }, - (Object[]) exec("List l = new ArrayList(); l.add('foo'); l.add('bar'); " + - "Stream stream = l.stream().map(StringBuilder::new);" + - "return stream.map(Object::toString).toArray()")); + assertArrayEquals( + new Object[] { "foo", "bar" }, + (Object[]) exec( + "List l = new ArrayList(); l.add('foo'); l.add('bar'); " + + "Stream stream = l.stream().map(StringBuilder::new);" + + "return stream.map(Object::toString).toArray()" + ) + ); } public void testArrayCtorMethodRef() { - assertEquals(1.0D, - exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " + - "def[] array = l.stream().toArray(Double[]::new);" + - "return array[0];")); + assertEquals( + 1.0D, + exec( + "List l = new ArrayList(); l.add(1.0); l.add(2.0); " + + "def[] array = l.stream().toArray(Double[]::new);" + + "return array[0];" + ) + ); } public void testArrayCtorMethodRefDef() { - assertEquals(1.0D, - exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " + - "def[] array = l.stream().toArray(Double[]::new);" + - "return array[0];")); + assertEquals( + 1.0D, + exec( + "def l = new ArrayList(); l.add(1.0); l.add(2.0); " + + "def[] array = l.stream().toArray(Double[]::new);" + + "return array[0];" + ) + ); } public void testCapturingMethodReference() { @@ -143,158 +177,212 @@ public class FunctionRefTests extends ScriptTestCase { } public void testCapturingMethodReferenceMultipleLambdas() { - assertEquals("testingcdefg", exec( - "String x = 'testing';" + - "String y = 'abcdefg';" + - "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + - 
"return test.twoFunctionsOfX(x::concat, y::substring);")); + assertEquals( + "testingcdefg", + exec( + "String x = 'testing';" + + "String y = 'abcdefg';" + + "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + + "return test.twoFunctionsOfX(x::concat, y::substring);" + ) + ); } public void testCapturingMethodReferenceMultipleLambdasDefImpls() { - assertEquals("testingcdefg", exec( - "def x = 'testing';" + - "def y = 'abcdefg';" + - "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + - "return test.twoFunctionsOfX(x::concat, y::substring);")); + assertEquals( + "testingcdefg", + exec( + "def x = 'testing';" + + "def y = 'abcdefg';" + + "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + + "return test.twoFunctionsOfX(x::concat, y::substring);" + ) + ); } public void testCapturingMethodReferenceMultipleLambdasDefInterface() { - assertEquals("testingcdefg", exec( - "String x = 'testing';" + - "String y = 'abcdefg';" + - "def test = new org.opensearch.painless.FeatureTestObject(2,3);" + - "return test.twoFunctionsOfX(x::concat, y::substring);")); + assertEquals( + "testingcdefg", + exec( + "String x = 'testing';" + + "String y = 'abcdefg';" + + "def test = new org.opensearch.painless.FeatureTestObject(2,3);" + + "return test.twoFunctionsOfX(x::concat, y::substring);" + ) + ); } public void testCapturingMethodReferenceMultipleLambdasDefEverywhere() { - assertEquals("testingcdefg", exec( - "def x = 'testing';" + - "def y = 'abcdefg';" + - "def test = new org.opensearch.painless.FeatureTestObject(2,3);" + - "return test.twoFunctionsOfX(x::concat, y::substring);")); + assertEquals( + "testingcdefg", + exec( + "def x = 'testing';" + + "def y = 'abcdefg';" + + "def test = new org.opensearch.painless.FeatureTestObject(2,3);" + + "return test.twoFunctionsOfX(x::concat, y::substring);" + ) + ); } public void testOwnStaticMethodReference() { - assertEquals(2, exec("int mycompare(int i, int j) { j - i } " + - "List l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);")); + assertEquals( + 2, + exec( + "int mycompare(int i, int j) { j - i } " + + "List l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);" + ) + ); } public void testOwnStaticMethodReferenceDef() { - assertEquals(2, exec("int mycompare(int i, int j) { j - i } " + - "def l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);")); + assertEquals( + 2, + exec( + "int mycompare(int i, int j) { j - i } " + + "def l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);" + ) + ); } public void testInterfaceDefaultMethod() { - assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + - "Map map = new HashMap(); f(map::getOrDefault)")); + assertEquals( + "bar", + exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + "Map map = new HashMap(); f(map::getOrDefault)") + ); } public void testInterfaceDefaultMethodDef() { - assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + - "def map = new HashMap(); f(map::getOrDefault)")); + assertEquals( + "bar", + exec("String f(BiFunction function) { function.apply('foo', 'bar') }" + "def map = new HashMap(); f(map::getOrDefault)") + ); } public void testInterfaceStaticMethod() { - assertEquals(-1, exec("Supplier get(Supplier supplier) { return supplier }" 
+ - "Supplier s = get(Comparator::naturalOrder); s.get().compare(1, 2)")); + assertEquals( + -1, + exec( + "Supplier get(Supplier supplier) { return supplier }" + "Supplier s = get(Comparator::naturalOrder); s.get().compare(1, 2)" + ) + ); } public void testMethodMissing() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);"); - }); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);"); } + ); assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator")); } public void testQualifiedMethodMissing() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = [2, 1]; l.sort(java.time.Instant::bogus); return l.get(0);", false); - }); - assertThat(e.getMessage(), - containsString("function reference [java.time.Instant::bogus/2] matching [java.util.Comparator, compare/2")); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = [2, 1]; l.sort(java.time.Instant::bogus); return l.get(0);", false); } + ); + assertThat( + e.getMessage(), + containsString("function reference [java.time.Instant::bogus/2] matching [java.util.Comparator, compare/2") + ); } public void testClassMissing() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = [2, 1]; l.sort(Bogus::bogus); return l.get(0);", false); - }); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = [2, 1]; l.sort(Bogus::bogus); return l.get(0);", false); } + ); assertThat(e.getMessage(), endsWith("variable [Bogus] is not defined")); } public void testQualifiedClassMissing() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = [2, 1]; l.sort(org.joda.time.BogusDateTime::bogus); return l.get(0);", false); - }); + Exception e = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = [2, 1]; l.sort(org.joda.time.BogusDateTime::bogus); return l.get(0);", false); } + ); assertEquals("variable [org.joda.time.BogusDateTime] is not defined", e.getMessage()); } public void testNotFunctionalInterface() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);"); - }); - assertThat(expected.getMessage(), - containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);"); } + ); + assertThat( + expected.getMessage(), + containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]") + ); } public void testIncompatible() { - expectScriptThrows(ClassCastException.class, () -> { - exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);"); - }); + expectScriptThrows( + ClassCastException.class, + () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);"); } + ); } public void testWrongArity() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - 
exec("Optional.empty().orElseGet(String::startsWith);"); - }); - assertThat(expected.getMessage(), - containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("Optional.empty().orElseGet(String::startsWith);"); } + ); + assertThat( + expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier") + ); } public void testWrongArityNotEnough() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); - }); - assertThat(expected.getMessage(), containsString( - "function reference [String::isEmpty/2] matching [java.util.Comparator")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); } + ); + assertThat(expected.getMessage(), containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testWrongArityDef() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);"); - }); - assertThat(expected.getMessage(), - containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);"); } + ); + assertThat( + expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier") + ); } public void testWrongArityNotEnoughDef() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); - }); - assertThat(expected.getMessage(), - containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); } + ); + assertThat(expected.getMessage(), containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testReturnVoid() { - Throwable expected = expectScriptThrows(ClassCastException.class, () -> { - exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength).sum();"); - }); + Throwable expected = expectScriptThrows( + ClassCastException.class, + () -> { exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength).sum();"); } + ); assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [long].")); } public void testReturnVoidDef() { - Exception expected = expectScriptThrows(LambdaConversionException.class, () -> { - exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);"); - }); + Exception expected = expectScriptThrows( + LambdaConversionException.class, + () -> { exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);"); } + ); assertThat(expected.getMessage(), containsString("lambda expects return type [long], but 
found return type [void]")); - expected = expectScriptThrows(LambdaConversionException.class, () -> { - exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);"); - }); + expected = expectScriptThrows( + LambdaConversionException.class, + () -> { exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);"); } + ); assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]")); - expected = expectScriptThrows(LambdaConversionException.class, () -> { - exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength);"); - }); + expected = expectScriptThrows( + LambdaConversionException.class, + () -> { exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength);"); } + ); assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]")); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionTests.java index 373ad808f75..77b6e8f0b3d 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/FunctionTests.java @@ -62,11 +62,11 @@ public class FunctionTests extends ScriptTestCase { } public void testEmpty() { - Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("void test(int x) {} test()"); - }); - assertThat(expected.getMessage(), containsString( - "invalid function definition: found no statements for function [test] with [1] parameters")); + Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("void test(int x) {} test()"); }); + assertThat( + expected.getMessage(), + containsString("invalid function definition: found no statements for function [test] with [1] parameters") + ); } public void testReturnsAreUnboxedIfNeeded() { @@ -82,9 +82,10 @@ public class FunctionTests extends ScriptTestCase { } public void testDuplicates() { - Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("void test(int x) {x = 2;} void test(def y) {y = 3;} test()"); - }); + Exception expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("void test(int x) {x = 2;} void test(def y) {y = 3;} test()"); } + ); assertThat(expected.getMessage(), containsString("found duplicate function")); } @@ -98,27 +99,32 @@ public class FunctionTests extends ScriptTestCase { } public void testInfiniteLoop() { - Error expected = expectScriptThrows(PainlessError.class, () -> { - exec("void test() {boolean x = true; while (x) {}} test()"); - }); - assertThat(expected.getMessage(), - containsString("The maximum number of statements that can be executed in a loop has been reached.")); + Error expected = expectScriptThrows(PainlessError.class, () -> { exec("void test() {boolean x = true; while (x) {}} test()"); }); + assertThat( + expected.getMessage(), + containsString("The maximum number of statements that can be executed in a loop has been reached.") + ); } public void testReturnVoid() { assertEquals(null, exec("void test(StringBuilder b, int i) {b.setLength(i)} test(new StringBuilder(), 1)")); - Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int test(StringBuilder b, int i) {b.setLength(i)} test(new StringBuilder(), 1)"); - }); - assertEquals("invalid 
function definition: " + - "not all paths provide a return value for function [test] with [2] parameters", expected.getMessage()); - expected = expectScriptThrows(ClassCastException.class, () -> { - exec("int test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)"); - }); + Exception expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("int test(StringBuilder b, int i) {b.setLength(i)} test(new StringBuilder(), 1)"); } + ); + assertEquals( + "invalid function definition: " + "not all paths provide a return value for function [test] with [2] parameters", + expected.getMessage() + ); + expected = expectScriptThrows( + ClassCastException.class, + () -> { exec("int test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)"); } + ); assertEquals("Cannot cast from [void] to [int].", expected.getMessage()); - expected = expectScriptThrows(ClassCastException.class, () -> { - exec("def test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)"); - }); + expected = expectScriptThrows( + ClassCastException.class, + () -> { exec("def test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)"); } + ); assertEquals("Cannot cast from [void] to [def].", expected.getMessage()); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/GeneralCastTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/GeneralCastTests.java index 99dc813c604..225dbd51819 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/GeneralCastTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/GeneralCastTests.java @@ -39,9 +39,9 @@ public class GeneralCastTests extends ScriptTestCase { * Unary operator with explicit cast */ public void testUnaryOperator() { - assertEquals((byte)5, exec("long x = 5L; return (byte) (+x);")); - assertEquals((short)5, exec("long x = 5L; return (short) (+x);")); - assertEquals((char)5, exec("long x = 5L; return (char) (+x);")); + assertEquals((byte) 5, exec("long x = 5L; return (byte) (+x);")); + assertEquals((short) 5, exec("long x = 5L; return (short) (+x);")); + assertEquals((char) 5, exec("long x = 5L; return (char) (+x);")); assertEquals(5, exec("long x = 5L; return (int) (+x);")); assertEquals(5F, exec("long x = 5L; return (float) (+x);")); assertEquals(5L, exec("long x = 5L; return (long) (+x);")); @@ -52,9 +52,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary operators with explicit cast */ public void testBinaryOperator() { - assertEquals((byte)6, exec("long x = 5L; return (byte) (x + 1);")); - assertEquals((short)6, exec("long x = 5L; return (short) (x + 1);")); - assertEquals((char)6, exec("long x = 5L; return (char) (x + 1);")); + assertEquals((byte) 6, exec("long x = 5L; return (byte) (x + 1);")); + assertEquals((short) 6, exec("long x = 5L; return (short) (x + 1);")); + assertEquals((char) 6, exec("long x = 5L; return (char) (x + 1);")); assertEquals(6, exec("long x = 5L; return (int) (x + 1);")); assertEquals(6F, exec("long x = 5L; return (float) (x + 1);")); assertEquals(6L, exec("long x = 5L; return (long) (x + 1);")); @@ -65,9 +65,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary compound assignment with explicit cast */ public void testBinaryCompoundAssignment() { - assertEquals((byte)6, exec("long x = 5L; return (byte) (x += 1);")); - assertEquals((short)6, exec("long x = 5L; return (short) (x += 1);")); - assertEquals((char)6, exec("long x = 
5L; return (char) (x += 1);")); + assertEquals((byte) 6, exec("long x = 5L; return (byte) (x += 1);")); + assertEquals((short) 6, exec("long x = 5L; return (short) (x += 1);")); + assertEquals((char) 6, exec("long x = 5L; return (char) (x += 1);")); assertEquals(6, exec("long x = 5L; return (int) (x += 1);")); assertEquals(6F, exec("long x = 5L; return (float) (x += 1);")); assertEquals(6L, exec("long x = 5L; return (long) (x += 1);")); @@ -78,9 +78,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary compound prefix with explicit cast */ public void testBinaryPrefix() { - assertEquals((byte)6, exec("long x = 5L; return (byte) (++x);")); - assertEquals((short)6, exec("long x = 5L; return (short) (++x);")); - assertEquals((char)6, exec("long x = 5L; return (char) (++x);")); + assertEquals((byte) 6, exec("long x = 5L; return (byte) (++x);")); + assertEquals((short) 6, exec("long x = 5L; return (short) (++x);")); + assertEquals((char) 6, exec("long x = 5L; return (char) (++x);")); assertEquals(6, exec("long x = 5L; return (int) (++x);")); assertEquals(6F, exec("long x = 5L; return (float) (++x);")); assertEquals(6L, exec("long x = 5L; return (long) (++x);")); @@ -91,9 +91,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary compound postifx with explicit cast */ public void testBinaryPostfix() { - assertEquals((byte)5, exec("long x = 5L; return (byte) (x++);")); - assertEquals((short)5, exec("long x = 5L; return (short) (x++);")); - assertEquals((char)5, exec("long x = 5L; return (char) (x++);")); + assertEquals((byte) 5, exec("long x = 5L; return (byte) (x++);")); + assertEquals((short) 5, exec("long x = 5L; return (short) (x++);")); + assertEquals((char) 5, exec("long x = 5L; return (char) (x++);")); assertEquals(5, exec("long x = 5L; return (int) (x++);")); assertEquals(5F, exec("long x = 5L; return (float) (x++);")); assertEquals(5L, exec("long x = 5L; return (long) (x++);")); @@ -104,9 +104,9 @@ public class GeneralCastTests extends ScriptTestCase { * Shift operators with explicit cast */ public void testShiftOperator() { - assertEquals((byte)10, exec("long x = 5L; return (byte) (x << 1);")); - assertEquals((short)10, exec("long x = 5L; return (short) (x << 1);")); - assertEquals((char)10, exec("long x = 5L; return (char) (x << 1);")); + assertEquals((byte) 10, exec("long x = 5L; return (byte) (x << 1);")); + assertEquals((short) 10, exec("long x = 5L; return (short) (x << 1);")); + assertEquals((char) 10, exec("long x = 5L; return (char) (x << 1);")); assertEquals(10, exec("long x = 5L; return (int) (x << 1);")); assertEquals(10F, exec("long x = 5L; return (float) (x << 1);")); assertEquals(10L, exec("long x = 5L; return (long) (x << 1);")); @@ -117,9 +117,9 @@ public class GeneralCastTests extends ScriptTestCase { * Shift compound assignment with explicit cast */ public void testShiftCompoundAssignment() { - assertEquals((byte)10, exec("long x = 5L; return (byte) (x <<= 1);")); - assertEquals((short)10, exec("long x = 5L; return (short) (x <<= 1);")); - assertEquals((char)10, exec("long x = 5L; return (char) (x <<= 1);")); + assertEquals((byte) 10, exec("long x = 5L; return (byte) (x <<= 1);")); + assertEquals((short) 10, exec("long x = 5L; return (short) (x <<= 1);")); + assertEquals((char) 10, exec("long x = 5L; return (char) (x <<= 1);")); assertEquals(10, exec("long x = 5L; return (int) (x <<= 1);")); assertEquals(10F, exec("long x = 5L; return (float) (x <<= 1);")); assertEquals(10L, exec("long x = 5L; return (long) (x <<= 1);")); @@ -130,42 +130,22 
@@ public class GeneralCastTests extends ScriptTestCase { * Test that without a cast, we fail when conversions would narrow. */ public void testIllegalConversions() { - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; int y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; int y = (x + x); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = true; int y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = true; int y = (x ^ false); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; boolean y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; boolean y = (x + x); return y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; int y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; int y = (x + x); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = true; int y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = true; int y = (x ^ false); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; boolean y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; boolean y = (x + x); return y"); }); } /** * Test that even with a cast, some things aren't allowed. */ public void testIllegalExplicitConversions() { - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = true; int y = (int) +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("boolean x = true; int y = (int) (x ^ false); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; boolean y = (boolean) +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("long x = 5L; boolean y = (boolean) (x + x); return y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = true; int y = (int) +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("boolean x = true; int y = (int) (x ^ false); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; boolean y = (boolean) +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 5L; boolean y = (boolean) (x + x); return y"); }); } /** @@ -188,9 +168,9 @@ public class GeneralCastTests extends ScriptTestCase { * Unary operators adopt the return value */ public void testUnaryOperatorDef() { - assertEquals((byte)5, exec("def x = 5L; return (byte) (+x);")); - assertEquals((short)5, exec("def x = 5L; return (short) (+x);")); - assertEquals((char)5, exec("def x = 5L; return (char) (+x);")); + assertEquals((byte) 5, exec("def x = 5L; return (byte) (+x);")); + assertEquals((short) 5, exec("def x = 5L; return (short) (+x);")); + assertEquals((char) 5, exec("def x = 5L; return (char) (+x);")); assertEquals(5, exec("def x = 5L; return (int) (+x);")); assertEquals(5F, exec("def x = 5L; return (float) (+x);")); assertEquals(5L, exec("def x = 5L; return (long) (+x);")); @@ -201,9 +181,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary operators adopt the return value */ public void testBinaryOperatorDef() { - assertEquals((byte)6, exec("def x = 5L; return (byte) (x + 1);")); - 
assertEquals((short)6, exec("def x = 5L; return (short) (x + 1);")); - assertEquals((char)6, exec("def x = 5L; return (char) (x + 1);")); + assertEquals((byte) 6, exec("def x = 5L; return (byte) (x + 1);")); + assertEquals((short) 6, exec("def x = 5L; return (short) (x + 1);")); + assertEquals((char) 6, exec("def x = 5L; return (char) (x + 1);")); assertEquals(6, exec("def x = 5L; return (int) (x + 1);")); assertEquals(6F, exec("def x = 5L; return (float) (x + 1);")); assertEquals(6L, exec("def x = 5L; return (long) (x + 1);")); @@ -214,9 +194,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary operators don't yet adopt the return value with compound assignment */ public void testBinaryCompoundAssignmentDef() { - assertEquals((byte)6, exec("def x = 5L; return (byte) (x += 1);")); - assertEquals((short)6, exec("def x = 5L; return (short) (x += 1);")); - assertEquals((char)6, exec("def x = 5L; return (char) (x += 1);")); + assertEquals((byte) 6, exec("def x = 5L; return (byte) (x += 1);")); + assertEquals((short) 6, exec("def x = 5L; return (short) (x += 1);")); + assertEquals((char) 6, exec("def x = 5L; return (char) (x += 1);")); assertEquals(6, exec("def x = 5L; return (int) (x += 1);")); assertEquals(6F, exec("def x = 5L; return (float) (x += 1);")); assertEquals(6L, exec("def x = 5L; return (long) (x += 1);")); @@ -227,9 +207,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary operators don't yet adopt the return value with compound assignment */ public void testBinaryCompoundAssignmentPrefix() { - assertEquals((byte)6, exec("def x = 5L; return (byte) (++x);")); - assertEquals((short)6, exec("def x = 5L; return (short) (++x);")); - assertEquals((char)6, exec("def x = 5L; return (char) (++x);")); + assertEquals((byte) 6, exec("def x = 5L; return (byte) (++x);")); + assertEquals((short) 6, exec("def x = 5L; return (short) (++x);")); + assertEquals((char) 6, exec("def x = 5L; return (char) (++x);")); assertEquals(6, exec("def x = 5L; return (int) (++x);")); assertEquals(6F, exec("def x = 5L; return (float) (++x);")); assertEquals(6L, exec("def x = 5L; return (long) (++x);")); @@ -240,9 +220,9 @@ public class GeneralCastTests extends ScriptTestCase { * Binary operators don't yet adopt the return value with compound assignment */ public void testBinaryCompoundAssignmentPostfix() { - assertEquals((byte)5, exec("def x = 5L; return (byte) (x++);")); - assertEquals((short)5, exec("def x = 5L; return (short) (x++);")); - assertEquals((char)5, exec("def x = 5L; return (char) (x++);")); + assertEquals((byte) 5, exec("def x = 5L; return (byte) (x++);")); + assertEquals((short) 5, exec("def x = 5L; return (short) (x++);")); + assertEquals((char) 5, exec("def x = 5L; return (char) (x++);")); assertEquals(5, exec("def x = 5L; return (int) (x++);")); assertEquals(5F, exec("def x = 5L; return (float) (x++);")); assertEquals(5L, exec("def x = 5L; return (long) (x++);")); @@ -253,9 +233,9 @@ public class GeneralCastTests extends ScriptTestCase { * Shift operators adopt the return value */ public void testShiftOperatorDef() { - assertEquals((byte)10, exec("def x = 5L; return (byte) (x << 1);")); - assertEquals((short)10, exec("def x = 5L; return (short) (x << 1);")); - assertEquals((char)10, exec("def x = 5L; return (char) (x << 1);")); + assertEquals((byte) 10, exec("def x = 5L; return (byte) (x << 1);")); + assertEquals((short) 10, exec("def x = 5L; return (short) (x << 1);")); + assertEquals((char) 10, exec("def x = 5L; return (char) (x << 1);")); assertEquals(10, 
exec("def x = 5L; return (int) (x << 1);")); assertEquals(10F, exec("def x = 5L; return (float) (x << 1);")); assertEquals(10L, exec("def x = 5L; return (long) (x << 1);")); @@ -266,9 +246,9 @@ public class GeneralCastTests extends ScriptTestCase { * Shift operators don't yet adopt the return value with compound assignment */ public void testShiftCompoundAssignmentDef() { - assertEquals((byte)10, exec("def x = 5L; return (byte) (x <<= 1);")); - assertEquals((short)10, exec("def x = 5L; return (short) (x <<= 1);")); - assertEquals((char)10, exec("def x = 5L; return (char) (x <<= 1);")); + assertEquals((byte) 10, exec("def x = 5L; return (byte) (x <<= 1);")); + assertEquals((short) 10, exec("def x = 5L; return (short) (x <<= 1);")); + assertEquals((char) 10, exec("def x = 5L; return (char) (x <<= 1);")); assertEquals(10, exec("def x = 5L; return (int) (x <<= 1);")); assertEquals(10F, exec("def x = 5L; return (float) (x <<= 1);")); assertEquals(10L, exec("def x = 5L; return (long) (x <<= 1);")); @@ -279,24 +259,12 @@ public class GeneralCastTests extends ScriptTestCase { * Test that without a cast, we fail when conversions would narrow. */ public void testIllegalConversionsDef() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; int y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; int y = (x + x); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = true; int y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = true; int y = (x ^ false); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; boolean y = +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; boolean y = (x + x); return y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; int y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; int y = (x + x); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = true; int y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = true; int y = (x ^ false); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; boolean y = +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; boolean y = (x + x); return y"); }); } public void testUnboxMethodParameters() { @@ -318,27 +286,21 @@ public class GeneralCastTests extends ScriptTestCase { * (stuff that methodhandles explicitCastArguments would otherwise allow) */ public void testIllegalExplicitConversionsDef() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = true; int y = (int) +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = true; int y = (int) (x ^ false); return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; boolean y = (boolean) +x; return y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 5L; boolean y = (boolean) (x + x); return y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = true; int y = (int) +x; return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = true; int y = (int) (x ^ false); return y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; boolean y = (boolean) +x; return 
y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 5L; boolean y = (boolean) (x + x); return y"); }); } public void testIllegalVoidCasts() { - expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def map = ['a': 1,'b': 2,'c': 3]; map.c = Collections.sort(new ArrayList(map.keySet()));"); - }); - expectScriptThrows(IllegalArgumentException.class, () -> { - exec("Map map = ['a': 1,'b': 2,'c': 3]; def x = new HashMap(); x.put(1, map.clear());"); - }); + expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def map = ['a': 1,'b': 2,'c': 3]; map.c = Collections.sort(new ArrayList(map.keySet()));"); } + ); + expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("Map map = ['a': 1,'b': 2,'c': 3]; def x = new HashMap(); x.put(1, map.clear());"); } + ); } public void testBoxedDefCalls() { @@ -347,13 +309,16 @@ public class GeneralCastTests extends ScriptTestCase { assertEquals(1, exec("int x = 1; def y = 2.0; y.compareTo(x);")); assertEquals(-1, exec("Integer x = Integer.valueOf(3); def y = 2.0; y.compareTo(x);")); assertEquals(2, exec("def f = new org.opensearch.painless.FeatureTestObject(); f.i = (byte)2; f.i")); - assertEquals(4.0, exec( - "def x = new org.opensearch.painless.FeatureTestObject(); " + - "Byte i = Byte.valueOf(3); " + - "byte j = 1;" + - "Short s = Short.valueOf(-2);" + - "x.mixedAdd(j, i, (char)2, s)" - )); + assertEquals( + 4.0, + exec( + "def x = new org.opensearch.painless.FeatureTestObject(); " + + "Byte i = Byte.valueOf(3); " + + "byte j = 1;" + + "Short s = Short.valueOf(-2);" + + "x.mixedAdd(j, i, (char)2, s)" + ) + ); assertNull(exec("def f = new org.opensearch.painless.FeatureTestObject(); f.i = null; f.i")); expectScriptThrows(ClassCastException.class, () -> exec("def x = 2.0; def y = 1; y.compareTo(x);")); expectScriptThrows(ClassCastException.class, () -> exec("float f = 1.0f; def y = 1; y.compareTo(f);")); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/GetByPathAugmentationTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/GetByPathAugmentationTests.java index 776bc46eefd..fbe24070418 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/GetByPathAugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/GetByPathAugmentationTests.java @@ -32,7 +32,6 @@ package org.opensearch.painless; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -43,7 +42,7 @@ public class GetByPathAugmentationTests extends ScriptTestCase { private final String k001Key = "k011"; private final String k001Value = "b"; - private final Map k001Obj = new HashMap<>(); + private final Map k001Obj = new HashMap<>(); private final String k001MapStr = "['" + k001Key + "': '" + k001Value + "']"; private final String mapMapList = "['k0': ['k01': [['k010': 'a'], " + k001MapStr + "]], 'k1': ['q']]"; @@ -96,10 +95,7 @@ public class GetByPathAugmentationTests extends ScriptTestCase { } private IllegalArgumentException assertPathError(String script, String message) { - IllegalArgumentException illegal = expectScriptThrows( - IllegalArgumentException.class, - () -> exec(script) - ); + IllegalArgumentException illegal = expectScriptThrows(IllegalArgumentException.class, () -> exec(script)); assertEquals(message, illegal.getMessage()); return illegal; } @@ -229,37 +225,24 @@ public class GetByPathAugmentationTests extends ScriptTestCase { public void testBiListDefaultBadIndex() { String path = "1.k0"; - 
IllegalArgumentException err = assertPathError( - "[['a','b'],['c','d']]", - path, - "'foo'", - numberFormat("k0", path, 1)); + IllegalArgumentException err = assertPathError("[['a','b'],['c','d']]", path, "'foo'", numberFormat("k0", path, 1)); assertEquals(err.getCause().getClass(), NumberFormatException.class); } public void testBiMapListDefaultBadIndex() { String path = "k0.k01.k012"; - IllegalArgumentException err = assertPathError( - mapMapList, - path, - "'foo'", - numberFormat("k012", path, 2)); + IllegalArgumentException err = assertPathError(mapMapList, path, "'foo'", numberFormat("k012", path, 2)); assertEquals(err.getCause().getClass(), NumberFormatException.class); } public void testListMapBiListObjectDefaultBadIndex() { String path = "2.m2.a8"; - IllegalArgumentException err = assertPathError( - listMapListList, - path, - "'foo'", - numberFormat("a8", path, 2)); + IllegalArgumentException err = assertPathError(listMapListList, path, "'foo'", numberFormat("a8", path, 2)); assertEquals(err.getCause().getClass(), NumberFormatException.class); } public void testNonContainerDefaultBadIndex() { - assertPathError(mapMap, "a.b.c", "'foo'", - "Non-container [java.lang.String] at [c], index [2] in path [a.b.c]"); + assertPathError(mapMap, "a.b.c", "'foo'", "Non-container [java.lang.String] at [c], index [2] in path [a.b.c]"); } public void testDoubleDotDefault() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/IncrementTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/IncrementTests.java index 6354dcc9b82..831a281ad99 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/IncrementTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/IncrementTests.java @@ -37,25 +37,25 @@ public class IncrementTests extends ScriptTestCase { /** incrementing byte values */ public void testIncrementByte() { - assertEquals((byte)0, exec("byte x = (byte)0; return x++;")); - assertEquals((byte)0, exec("byte x = (byte)0; return x--;")); - assertEquals((byte)1, exec("byte x = (byte)0; return ++x;")); - assertEquals((byte)-1, exec("byte x = (byte)0; return --x;")); + assertEquals((byte) 0, exec("byte x = (byte)0; return x++;")); + assertEquals((byte) 0, exec("byte x = (byte)0; return x--;")); + assertEquals((byte) 1, exec("byte x = (byte)0; return ++x;")); + assertEquals((byte) -1, exec("byte x = (byte)0; return --x;")); } /** incrementing char values */ public void testIncrementChar() { - assertEquals((char)0, exec("char x = (char)0; return x++;")); - assertEquals((char)1, exec("char x = (char)1; return x--;")); - assertEquals((char)1, exec("char x = (char)0; return ++x;")); + assertEquals((char) 0, exec("char x = (char)0; return x++;")); + assertEquals((char) 1, exec("char x = (char)1; return x--;")); + assertEquals((char) 1, exec("char x = (char)0; return ++x;")); } /** incrementing short values */ public void testIncrementShort() { - assertEquals((short)0, exec("short x = (short)0; return x++;")); - assertEquals((short)0, exec("short x = (short)0; return x--;")); - assertEquals((short)1, exec("short x = (short)0; return ++x;")); - assertEquals((short)-1, exec("short x = (short)0; return --x;")); + assertEquals((short) 0, exec("short x = (short)0; return x++;")); + assertEquals((short) 0, exec("short x = (short)0; return x--;")); + assertEquals((short) 1, exec("short x = (short)0; return ++x;")); + assertEquals((short) -1, exec("short x = (short)0; return --x;")); } /** incrementing integer values */ @@ -92,17 +92,17 
@@ public class IncrementTests extends ScriptTestCase { /** incrementing def values */ public void testIncrementDef() { - assertEquals((byte)0, exec("def x = (byte)0; return x++;")); - assertEquals((byte)0, exec("def x = (byte)0; return x--;")); - assertEquals((byte)1, exec("def x = (byte)0; return ++x;")); - assertEquals((byte)-1, exec("def x = (byte)0; return --x;")); - assertEquals((char)0, exec("def x = (char)0; return x++;")); - assertEquals((char)1, exec("def x = (char)1; return x--;")); - assertEquals((char)1, exec("def x = (char)0; return ++x;")); - assertEquals((short)0, exec("def x = (short)0; return x++;")); - assertEquals((short)0, exec("def x = (short)0; return x--;")); - assertEquals((short)1, exec("def x = (short)0; return ++x;")); - assertEquals((short)-1, exec("def x = (short)0; return --x;")); + assertEquals((byte) 0, exec("def x = (byte)0; return x++;")); + assertEquals((byte) 0, exec("def x = (byte)0; return x--;")); + assertEquals((byte) 1, exec("def x = (byte)0; return ++x;")); + assertEquals((byte) -1, exec("def x = (byte)0; return --x;")); + assertEquals((char) 0, exec("def x = (char)0; return x++;")); + assertEquals((char) 1, exec("def x = (char)1; return x--;")); + assertEquals((char) 1, exec("def x = (char)0; return ++x;")); + assertEquals((short) 0, exec("def x = (short)0; return x++;")); + assertEquals((short) 0, exec("def x = (short)0; return x--;")); + assertEquals((short) 1, exec("def x = (short)0; return ++x;")); + assertEquals((short) -1, exec("def x = (short)0; return --x;")); assertEquals(0, exec("def x = 0; return x++;")); assertEquals(0, exec("def x = 0; return x--;")); assertEquals(1, exec("def x = 0; return ++x;")); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/InitializerTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/InitializerTests.java index 175483329ca..da2e790f22e 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/InitializerTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/InitializerTests.java @@ -39,13 +39,13 @@ import java.util.Map; public class InitializerTests extends ScriptTestCase { - @SuppressWarnings({"rawtypes"}) + @SuppressWarnings({ "rawtypes" }) public void testArrayInitializers() { - int[] ints = (int[])exec("new int[] {}"); + int[] ints = (int[]) exec("new int[] {}"); assertEquals(0, ints.length); - ints = (int[])exec("new int[] {5, 7, -1, 14}"); + ints = (int[]) exec("new int[] {5, 7, -1, 14}"); assertEquals(4, ints.length); assertEquals(5, ints[0]); @@ -53,7 +53,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals(-1, ints[2]); assertEquals(14, ints[3]); - ints = (int[])exec("int y = 2; int z = 3; int[] x = new int[] {y*z, y + z, y - z, y, z}; return x;"); + ints = (int[]) exec("int y = 2; int z = 3; int[] x = new int[] {y*z, y + z, y - z, y, z}; return x;"); assertEquals(5, ints.length); assertEquals(6, ints[0]); @@ -62,8 +62,9 @@ public class InitializerTests extends ScriptTestCase { assertEquals(2, ints[3]); assertEquals(3, ints[4]); - Object[] objects = (Object[])exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" + - "Object[] x = new Object[] {y, z, 1 + s, s + 'aaa'}; return x;"); + Object[] objects = (Object[]) exec( + "int y = 2; List z = new ArrayList(); String s = 'aaa';" + "Object[] x = new Object[] {y, z, 1 + s, s + 'aaa'}; return x;" + ); assertEquals(4, objects.length); assertEquals(Integer.valueOf(2), objects[0]); @@ -72,13 +73,13 @@ public class InitializerTests extends 
ScriptTestCase { assertEquals("aaaaaa", objects[3]); } - @SuppressWarnings({"rawtypes"}) + @SuppressWarnings({ "rawtypes" }) public void testListInitializers() { - List list = (List)exec("[]"); + List list = (List) exec("[]"); assertEquals(0, list.size()); - list = (List)exec("[5, 7, -1, 14]"); + list = (List) exec("[5, 7, -1, 14]"); assertEquals(4, list.size()); assertEquals(5, list.get(0)); @@ -86,7 +87,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals(-1, list.get(2)); assertEquals(14, list.get(3)); - list = (List)exec("int y = 2; int z = 3; def x = [y*z, y + z, y - z, y, z]; return x;"); + list = (List) exec("int y = 2; int z = 3; def x = [y*z, y + z, y - z, y, z]; return x;"); assertEquals(5, list.size()); assertEquals(6, list.get(0)); @@ -95,45 +96,44 @@ public class InitializerTests extends ScriptTestCase { assertEquals(2, list.get(3)); assertEquals(3, list.get(4)); - list = (List)exec("int y = 2; List z = new ArrayList(); String s = 'aaa'; List x = [y, z, 1 + s, s + 'aaa']; return x;"); + list = (List) exec("int y = 2; List z = new ArrayList(); String s = 'aaa'; List x = [y, z, 1 + s, s + 'aaa']; return x;"); assertEquals(4, list.size()); assertEquals(Integer.valueOf(2), list.get(0)); assertEquals(new ArrayList(), list.get(1)); - assertEquals("1aaa", list.get(2)); + assertEquals("1aaa", list.get(2)); assertEquals("aaaaaa", list.get(3)); } - @SuppressWarnings({"rawtypes"}) + @SuppressWarnings({ "rawtypes" }) public void testMapInitializers() { - Map map = (Map)exec("[:]"); + Map map = (Map) exec("[:]"); assertEquals(0, map.size()); - map = (Map)exec("[5 : 7, -1 : 14]"); + map = (Map) exec("[5 : 7, -1 : 14]"); assertEquals(2, map.size()); assertEquals(Integer.valueOf(7), map.get(5)); assertEquals(Integer.valueOf(14), map.get(-1)); - map = (Map)exec("int y = 2; int z = 3; Map x = [y*z : y + z, y - z : y, z : z]; return x;"); + map = (Map) exec("int y = 2; int z = 3; Map x = [y*z : y + z, y - z : y, z : z]; return x;"); assertEquals(3, map.size()); assertEquals(Integer.valueOf(5), map.get(6)); assertEquals(Integer.valueOf(2), map.get(-1)); assertEquals(Integer.valueOf(3), map.get(3)); - map = (Map)exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" + - "def x = [y : z, 1 + s : s + 'aaa']; return x;"); + map = (Map) exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" + "def x = [y : z, 1 + s : s + 'aaa']; return x;"); assertEquals(2, map.size()); assertEquals(new ArrayList(), map.get(2)); assertEquals("aaaaaa", map.get("1aaa")); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({ "unchecked", "rawtypes" }) public void testCrazyInitializer() { - Map map = (Map)exec("int y = 2; int z = 3; Map x = [y*z : y + z, 's' : [y, [y : [[z], [], [:]]]], z : [z, 9]]; return x;"); + Map map = (Map) exec("int y = 2; int z = 3; Map x = [y*z : y + z, 's' : [y, [y : [[z], [], [:]]]], z : [z, 9]]; return x;"); List list0 = new ArrayList(); list0.add(3); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/InjectionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/InjectionTests.java index a5a413d5ca6..67518f9a028 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/InjectionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/InjectionTests.java @@ -35,196 +35,260 @@ package org.opensearch.painless; public class InjectionTests extends ScriptTestCase { public void testInjection() { - assertEquals(16, - 
exec("org.opensearch.painless.FeatureTestObject.staticNumberArgument(8);")); + assertEquals(16, exec("org.opensearch.painless.FeatureTestObject.staticNumberArgument(8);")); } public void testInstanceInjection() { - assertEquals(1000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.injectTimesX(5)")); + assertEquals( + 1000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.injectTimesX(5)" + ) + ); } public void testInstanceInjectWithLambda() { - assertEquals(2000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.injectWithLambda(x -> 2*x, 5)")); + assertEquals( + 2000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.injectWithLambda(x -> 2*x, 5)" + ) + ); } public void testInstanceInjectWithDefLambda() { - assertEquals(2000, - exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.injectWithLambda(x -> 2*x, (short)5)")); + assertEquals(2000, exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.injectWithLambda(x -> 2*x, (short)5)")); } public void testInjectionOnDefNoInject() { - assertEquals(1000, - exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.injectTimesX((short)5)")); + assertEquals(1000, exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.injectTimesX((short)5)")); } public void testInjectionOnMethodReference() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "org.opensearch.painless.FeatureTestObject ft1 = " + - " new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "org.opensearch.painless.FeatureTestObject ft1 = " + + " new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testInjectionOnMethodReference2() { - assertEquals(60, - exec( - "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testInjectionOnMethodReference3() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testAugmentedInstanceInjection() { - assertEquals(1000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.augmentInjectTimesX(5)")); + assertEquals( + 1000, + exec( + "org.opensearch.painless.FeatureTestObject f = 
new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.augmentInjectTimesX(5)" + ) + ); } public void testAugmentedInstanceInjectWithLambda() { - assertEquals(2000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.augmentInjectWithLambda(x -> 2*x, 5)")); + assertEquals( + 2000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.augmentInjectWithLambda(x -> 2*x, 5)" + ) + ); } public void testAugmentedInstanceInjectWithDefLambda() { - assertEquals(2000, - exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.augmentInjectWithLambda(x -> 2*x, (short)5)")); + assertEquals( + 2000, + exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.augmentInjectWithLambda(x -> 2*x, (short)5)") + ); } public void testAugmentedInjectionOnDefNoInject() { - assertEquals(1000, - exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.augmentInjectTimesX((short)5)")); + assertEquals(1000, exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.augmentInjectTimesX((short)5)")); } public void testAugmentedInjectionOnMethodReference() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "org.opensearch.painless.FeatureTestObject ft1 = " + - " new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "org.opensearch.painless.FeatureTestObject ft1 = " + + " new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)" + ) + ); } public void testAugmentedInjectionOnMethodReference2() { - assertEquals(60, - exec( - "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)" + ) + ); } public void testAugmentedInjectionOnMethodReference3() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectTimesX, (short)3, 5)" + ) + ); } public void testInstanceMultiInjection() { - assertEquals(6000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.injectMultiTimesX(5)")); + assertEquals( + 6000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.injectMultiTimesX(5)" + ) + ); } public void testInstanceMultiInjectWithLambda() { - assertEquals(8000, - exec("org.opensearch.painless.FeatureTestObject f = new 
org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.injectMultiWithLambda(x -> 2*x, 5)")); + assertEquals( + 8000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.injectMultiWithLambda(x -> 2*x, 5)" + ) + ); } public void testInstanceMultiInjectWithDefLambda() { - assertEquals(2000, - exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.injectWithLambda(x -> 2*x, (short)5)")); + assertEquals(2000, exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); f.injectWithLambda(x -> 2*x, (short)5)")); } public void testMultiInjectionOnDefNoMultiInject() { - assertEquals(6000, - exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.injectMultiTimesX((short)5)")); + assertEquals(6000, exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.injectMultiTimesX((short)5)")); } public void testMultiInjectionOnMethodReference() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "org.opensearch.painless.FeatureTestObject ft1 = " + - " new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "org.opensearch.painless.FeatureTestObject ft1 = " + + " new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testMultiInjectionOnMethodReference2() { - assertEquals(60, - exec( - "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testMultiInjectionOnMethodReference3() { - assertEquals(60, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)")); + assertEquals( + 60, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.timesSupplier(ft0::injectTimesX, (short)3, 5)" + ) + ); } public void testAugmentedInstanceMultiInjection() { - assertEquals(5000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.augmentInjectMultiTimesX(5)")); + assertEquals( + 5000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.augmentInjectMultiTimesX(5)" + ) + ); } public void testAugmentedInstanceMultiInjectWithLambda() { - assertEquals(20000, - exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.augmentInjectMultiWithLambda(x -> 2*x, 5)")); + assertEquals( + 20000, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(100, 0); " + + "f.augmentInjectMultiWithLambda(x -> 2*x, 5)" + ) + ); } public void testAugmentedInstanceMultiInjectWithDefLambda() { 
- assertEquals(20000, - exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); " + - "f.augmentInjectMultiWithLambda(x -> 2*x, (short)5)")); + assertEquals( + 20000, + exec("def f = new org.opensearch.painless.FeatureTestObject(100, 0); " + "f.augmentInjectMultiWithLambda(x -> 2*x, (short)5)") + ); } public void testAugmentedMultiInjectionOnDefNoMultiInject() { - assertEquals(5000, - exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.augmentInjectMultiTimesX((short)5)")); + assertEquals(5000, exec("def d = new org.opensearch.painless.FeatureTestObject(100, 0); d.augmentInjectMultiTimesX((short)5)")); } public void testAugmentedMultiInjectionOnMethodReference() { - assertEquals(300, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "org.opensearch.painless.FeatureTestObject ft1 = " + - " new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)")); + assertEquals( + 300, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "org.opensearch.painless.FeatureTestObject ft1 = " + + " new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)" + ) + ); } public void testAugmentedMultiInjectionOnMethodReference2() { - assertEquals(300, - exec( - "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)")); + assertEquals( + 300, + exec( + "org.opensearch.painless.FeatureTestObject ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)" + ) + ); } public void testAugmentedMultiInjectionOnMethodReference3() { - assertEquals(300, - exec( - "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + - "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + - "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)")); + assertEquals( + 300, + exec( + "def ft0 = new org.opensearch.painless.FeatureTestObject(2, 0); " + + "def ft1 = new org.opensearch.painless.FeatureTestObject(1000, 0); " + + "ft1.augmentTimesSupplier(ft0::augmentInjectMultiTimesX, (short)3, 5)" + ) + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/IntegerOverflowTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/IntegerOverflowTests.java index f14c263c8f6..ee8cd416a1c 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/IntegerOverflowTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/IntegerOverflowTests.java @@ -37,16 +37,16 @@ public class IntegerOverflowTests extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // byte - assertEquals((byte)(0 + 128), exec("byte x = 0; x += 128; return x;")); - assertEquals((byte)(0 + -129), exec("byte x = 0; x += -129; return x;")); + assertEquals((byte) (0 + 128), exec("byte x = 0; x += 128; return x;")); + assertEquals((byte) (0 + -129), exec("byte x = 0; x += -129; return x;")); // short - assertEquals((short)(0 + 32768), exec("short x = 0; x += 32768; return x;")); - assertEquals((short)(0 + -32769), exec("short x = 0; x += -32769; return x;")); + assertEquals((short) 
(0 + 32768), exec("short x = 0; x += 32768; return x;")); + assertEquals((short) (0 + -32769), exec("short x = 0; x += -32769; return x;")); // char - assertEquals((char)(0 + 65536), exec("char x = 0; x += 65536; return x;")); - assertEquals((char)(0 + -65536), exec("char x = 0; x += -65536; return x;")); + assertEquals((char) (0 + 65536), exec("char x = 0; x += 65536; return x;")); + assertEquals((char) (0 + -65536), exec("char x = 0; x += -65536; return x;")); // int assertEquals(1 + 2147483647, exec("int x = 1; x += 2147483647; return x;")); @@ -59,16 +59,16 @@ public class IntegerOverflowTests extends ScriptTestCase { public void testAssignmentSubtractionOverflow() { // byte - assertEquals((byte)(0 - -128), exec("byte x = 0; x -= -128; return x;")); - assertEquals((byte)(0 - 129), exec("byte x = 0; x -= 129; return x;")); + assertEquals((byte) (0 - -128), exec("byte x = 0; x -= -128; return x;")); + assertEquals((byte) (0 - 129), exec("byte x = 0; x -= 129; return x;")); // short - assertEquals((short)(0 - -32768), exec("short x = 0; x -= -32768; return x;")); - assertEquals((short)(0 - 32769), exec("short x = 0; x -= 32769; return x;")); + assertEquals((short) (0 - -32768), exec("short x = 0; x -= -32768; return x;")); + assertEquals((short) (0 - 32769), exec("short x = 0; x -= 32769; return x;")); // char - assertEquals((char)(0 - -65536), exec("char x = 0; x -= -65536; return x;")); - assertEquals((char)(0 - 65536), exec("char x = 0; x -= 65536; return x;")); + assertEquals((char) (0 - -65536), exec("char x = 0; x -= -65536; return x;")); + assertEquals((char) (0 - 65536), exec("char x = 0; x -= 65536; return x;")); // int assertEquals(1 - -2147483647, exec("int x = 1; x -= -2147483647; return x;")); @@ -147,8 +147,10 @@ public class IntegerOverflowTests extends ScriptTestCase { public void testAddition() throws Exception { assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); - assertEquals(9223372036854775807L + 9223372036854775807L, - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;")); + assertEquals( + 9223372036854775807L + 9223372036854775807L, + exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;") + ); } public void testAdditionConst() throws Exception { @@ -168,8 +170,10 @@ public class IntegerOverflowTests extends ScriptTestCase { public void testMultiplication() throws Exception { assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); - assertEquals(9223372036854775807L * 9223372036854775807L, - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;")); + assertEquals( + 9223372036854775807L * 9223372036854775807L, + exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;") + ); } public void testMultiplicationConst() throws Exception { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/LambdaTests.java index aafc9aa1f7e..c1b19522021 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/LambdaTests.java @@ -49,40 +49,48 @@ public class LambdaTests extends ScriptTestCase { } public void testLambdaWithArgs() { - assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " - + "l.sort((a, b) -> a.length() - b.length()); return 
l.get(0)")); + assertEquals( + "short", + exec( + "List l = new ArrayList(); l.add('looooong'); l.add('short'); " + + "l.sort((a, b) -> a.length() - b.length()); return l.get(0)" + ) + ); } public void testLambdaWithTypedArgs() { - assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " - + "l.sort((String a, String b) -> a.length() - b.length()); return l.get(0)")); + assertEquals( + "short", + exec( + "List l = new ArrayList(); l.add('looooong'); l.add('short'); " + + "l.sort((String a, String b) -> a.length() - b.length()); return l.get(0)" + ) + ); } public void testPrimitiveLambdas() { - assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(x -> x + 1).sum();")); + assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> x + 1).sum();")); } public void testPrimitiveLambdasWithTypedArgs() { - assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(int x -> x + 1).sum();")); + assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(int x -> x + 1).sum();")); } public void testPrimitiveLambdasDef() { - assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(x -> x + 1).sum();")); + assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> x + 1).sum();")); } public void testPrimitiveLambdasWithTypedArgsDef() { - assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(int x -> x + 1).sum();")); + assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(int x -> x + 1).sum();")); } public void testPrimitiveLambdasConvertible() { - assertEquals(2, exec("List l = new ArrayList(); l.add((short)1); l.add(1); " - + "return l.stream().mapToInt(long x -> (int)1).sum();")); + assertEquals( + 2, + exec("List l = new ArrayList(); l.add((short)1); l.add(1); " + "return l.stream().mapToInt(long x -> (int)1).sum();") + ); } public void testPrimitiveArgs() { @@ -107,26 +115,40 @@ public class LambdaTests extends ScriptTestCase { /** interface ignores return value */ public void testVoidReturn() { - assertEquals(2, exec("List list = new ArrayList(); " - + "list.add(2); " - + "List list2 = new ArrayList(); " - + "list.forEach(x -> list2.add(x));" - + "return list[0]")); + assertEquals( + 2, + exec( + "List list = new ArrayList(); " + + "list.add(2); " + + "List list2 = new ArrayList(); " + + "list.forEach(x -> list2.add(x));" + + "return list[0]" + ) + ); } /** interface ignores return value */ public void testVoidReturnDef() { - assertEquals(2, exec("def list = new ArrayList(); " - + "list.add(2); " - + "List list2 = new ArrayList(); " - + "list.forEach(x -> list2.add(x));" - + "return list[0]")); + assertEquals( + 2, + exec( + "def list = new ArrayList(); " + + "list.add(2); " + + "List list2 = new ArrayList(); " + + "list.forEach(x -> list2.add(x));" + + "return list[0]" + ) + ); } public void testTwoLambdas() { - assertEquals("testingcdefg", exec( - "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + - "return test.twoFunctionsOfX(x -> 'testing'.concat(x), y -> 'abcdefg'.substring(y))")); + assertEquals( + "testingcdefg", + exec( + "org.opensearch.painless.FeatureTestObject test = new org.opensearch.painless.FeatureTestObject(2,3);" + + "return 
test.twoFunctionsOfX(x -> 'testing'.concat(x), y -> 'abcdefg'.substring(y))" + ) + ); } public void testNestedLambdas() { @@ -134,11 +156,12 @@ public class LambdaTests extends ScriptTestCase { } public void testLambdaInLoop() { - assertEquals(100, exec("int sum = 0; " + - "for (int i = 0; i < 100; i++) {" + - " sum += Optional.empty().orElseGet(() -> 1);" + - "}" + - "return sum;")); + assertEquals( + 100, + exec( + "int sum = 0; " + "for (int i = 0; i < 100; i++) {" + " sum += Optional.empty().orElseGet(() -> 1);" + "}" + "return sum;" + ) + ); } public void testCapture() { @@ -150,19 +173,28 @@ public class LambdaTests extends ScriptTestCase { } public void testCapturesAreReadOnly() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("List l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(x -> { l = null; return x + 1 }).sum();"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { + exec( + "List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> { l = null; return x + 1 }).sum();" + ); + } + ); assertTrue(expected.getMessage().contains("is read-only")); } /** Lambda parameters shouldn't be able to mask a variable already in scope */ public void testNoParamMasking() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("int x = 0; List l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { + exec( + "int x = 0; List l = new ArrayList(); l.add(1); l.add(1); " + + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();" + ); + } + ); assertTrue(expected.getMessage().contains("already defined")); } @@ -171,42 +203,47 @@ public class LambdaTests extends ScriptTestCase { } public void testNestedCapture() { - assertEquals(1, exec("boolean x = false; int y = 1;" + - "return Optional.empty().orElseGet(() -> x ? 5 : Optional.empty().orElseGet(() -> y));")); + assertEquals( + 1, + exec("boolean x = false; int y = 1;" + "return Optional.empty().orElseGet(() -> x ? 
5 : Optional.empty().orElseGet(() -> y));") + ); } public void testNestedCaptureParams() { - assertEquals(2, exec("int foo(Function f) { return f.apply(1) }" + - "return foo(x -> foo(y -> x + 1))")); + assertEquals(2, exec("int foo(Function f) { return f.apply(1) }" + "return foo(x -> foo(y -> x + 1))")); } public void testWrongArity() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> { - exec("Optional.empty().orElseGet(x -> x);"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> { exec("Optional.empty().orElseGet(x -> x);"); } + ); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); } public void testWrongArityDef() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); } + ); assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testWrongArityNotEnough() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> { - exec("List l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(() -> 5).sum();"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + false, + () -> { exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); } + ); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); } public void testWrongArityNotEnoughDef() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def l = new ArrayList(); l.add(1); l.add(1); " - + "return l.stream().mapToInt(() -> 5).sum();"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); } + ); assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } @@ -232,34 +269,68 @@ public class LambdaTests extends ScriptTestCase { assertEquals(true, exec(compare + "compare(() -> { return params['nokey'] }, null)", params, true)); assertEquals(true, exec(compare + "compare(() -> { return params['number'] }, 2)", params, true)); assertEquals(false, exec(compare + "compare(() -> { return params['number'] }, 'value')", params, true)); - assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + - "else { return params['key'] } }, 'value')", params, true)); - assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + - "else { return params['key'] } }, 2)", params, true)); - assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + - "else { return params['key'] } }, 'value')", params, true)); - assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + - "else { return params['key'] } }, 2)", params, true)); + assertEquals( + false, + exec( + compare + + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + + "else { 
return params['key'] } }, 'value')", + params, + true + ) + ); + assertEquals( + true, + exec( + compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + "else { return params['key'] } }, 2)", + params, + true + ) + ); + assertEquals( + true, + exec( + compare + + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + + "else { return params['key'] } }, 'value')", + params, + true + ) + ); + assertEquals( + false, + exec( + compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + "else { return params['key'] } }, 2)", + params, + true + ) + ); } public void testReturnVoid() { - Throwable expected = expectScriptThrows(ClassCastException.class, () -> { - exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))"); - }); + Throwable expected = expectScriptThrows( + ClassCastException.class, + () -> { exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))"); } + ); assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [long].")); } public void testReturnVoidDef() { // If we can catch the error at compile time we do - Exception expected = expectScriptThrows(ClassCastException.class, () -> { - exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))"); - }); + Exception expected = expectScriptThrows( + ClassCastException.class, + () -> { exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(i -> b.setLength(i))"); } + ); assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [def].")); // Otherwise we convert the void into a null - assertEquals(Arrays.asList(null, null), - exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())")); - assertEquals(Arrays.asList(null, null), - exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())")); + assertEquals( + Arrays.asList(null, null), + exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())") + ); + assertEquals( + Arrays.asList(null, null), + exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().map(i -> b.setLength(i)).collect(Collectors.toList())") + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ListTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ListTests.java index a06f2132b8a..5a54e7dd4d7 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ListTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ListTests.java @@ -56,13 +56,20 @@ public class ListTests extends ArrayLikeObjectTestCase { private String fillValue(String valueType) { switch (valueType) { - case "int": return "0"; - case "long": return "0L"; - case "short": return "(short) 0"; - case "byte": return "(byte) 0"; - case "float": return "0.0f"; - case "double": return "0.0"; // Double is implicit for decimal constants - default: return null; + case "int": + return "0"; + case "long": + return "0L"; + case "short": + return "(short) 0"; + case "byte": + return "(byte) 0"; + case "float": + return "0.0f"; + case "double": + return "0.0"; // Double is implicit for decimal constants + default: + return null; } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/MapTests.java 
b/modules/lang-painless/src/test/java/org/opensearch/painless/MapTests.java index c22b894645e..cbe62758665 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/MapTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/MapTests.java @@ -43,9 +43,9 @@ public class MapTests extends ScriptTestCase { assertEquals(2, exec(decl + "; return x[0];", true)); assertEquals(1, exec(decl + "; return x['a'];", true)); assertEquals(12, exec(decl + "; return x[123.1];", true)); - assertEquals(val, exec(decl + "; x[ 0] = params.val; return x[ 0];", singletonMap("val", val), true)); + assertEquals(val, exec(decl + "; x[ 0] = params.val; return x[ 0];", singletonMap("val", val), true)); assertEquals("slot", exec(decl + "; x[ 0] = params.val; return x[-5];", singletonMap("val", val), true)); - assertEquals(val, exec(decl + "; x[-5] = params.val; return x[-5];", singletonMap("val", val), true)); + assertEquals(val, exec(decl + "; x[-5] = params.val; return x[-5];", singletonMap("val", val), true)); } public void testMapInDefAccesses() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/MultiplicationTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/MultiplicationTests.java index 13f86a5c3a1..d9c1684128b 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/MultiplicationTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/MultiplicationTests.java @@ -33,7 +33,7 @@ package org.opensearch.painless; /** Tests for multiplication operator across all types */ -//TODO: NaN/Inf/overflow/... +// TODO: NaN/Inf/overflow/... public class MultiplicationTests extends ScriptTestCase { // TODO: short,byte,char @@ -43,102 +43,102 @@ public class MultiplicationTests extends ScriptTestCase { } public void testInt() throws Exception { - assertEquals(1*1, exec("int x = 1; int y = 1; return x*y;")); - assertEquals(2*3, exec("int x = 2; int y = 3; return x*y;")); - assertEquals(5*10, exec("int x = 5; int y = 10; return x*y;")); - assertEquals(1*1*2, exec("int x = 1; int y = 1; int z = 2; return x*y*z;")); - assertEquals((1*1)*2, exec("int x = 1; int y = 1; int z = 2; return (x*y)*z;")); - assertEquals(1*(1*2), exec("int x = 1; int y = 1; int z = 2; return x*(y*z);")); - assertEquals(10*0, exec("int x = 10; int y = 0; return x*y;")); - assertEquals(0*0, exec("int x = 0; int y = 0; return x*x;")); + assertEquals(1 * 1, exec("int x = 1; int y = 1; return x*y;")); + assertEquals(2 * 3, exec("int x = 2; int y = 3; return x*y;")); + assertEquals(5 * 10, exec("int x = 5; int y = 10; return x*y;")); + assertEquals(1 * 1 * 2, exec("int x = 1; int y = 1; int z = 2; return x*y*z;")); + assertEquals((1 * 1) * 2, exec("int x = 1; int y = 1; int z = 2; return (x*y)*z;")); + assertEquals(1 * (1 * 2), exec("int x = 1; int y = 1; int z = 2; return x*(y*z);")); + assertEquals(10 * 0, exec("int x = 10; int y = 0; return x*y;")); + assertEquals(0 * 0, exec("int x = 0; int y = 0; return x*x;")); } public void testIntConst() throws Exception { - assertEquals(1*1, exec("return 1*1;")); - assertEquals(2*3, exec("return 2*3;")); - assertEquals(5*10, exec("return 5*10;")); - assertEquals(1*1*2, exec("return 1*1*2;")); - assertEquals((1*1)*2, exec("return (1*1)*2;")); - assertEquals(1*(1*2), exec("return 1*(1*2);")); - assertEquals(10*0, exec("return 10*0;")); - assertEquals(0*0, exec("return 0*0;")); + assertEquals(1 * 1, exec("return 1*1;")); + assertEquals(2 * 3, exec("return 2*3;")); + assertEquals(5 * 10, exec("return 
5*10;")); + assertEquals(1 * 1 * 2, exec("return 1*1*2;")); + assertEquals((1 * 1) * 2, exec("return (1*1)*2;")); + assertEquals(1 * (1 * 2), exec("return 1*(1*2);")); + assertEquals(10 * 0, exec("return 10*0;")); + assertEquals(0 * 0, exec("return 0*0;")); } public void testByte() throws Exception { - assertEquals((byte)1*(byte)1, exec("byte x = 1; byte y = 1; return x*y;")); - assertEquals((byte)2*(byte)3, exec("byte x = 2; byte y = 3; return x*y;")); - assertEquals((byte)5*(byte)10, exec("byte x = 5; byte y = 10; return x*y;")); - assertEquals((byte)1*(byte)1*(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x*y*z;")); - assertEquals(((byte)1*(byte)1)*(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x*y)*z;")); - assertEquals((byte)1*((byte)1*(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x*(y*z);")); - assertEquals((byte)10*(byte)0, exec("byte x = 10; byte y = 0; return x*y;")); - assertEquals((byte)0*(byte)0, exec("byte x = 0; byte y = 0; return x*x;")); + assertEquals((byte) 1 * (byte) 1, exec("byte x = 1; byte y = 1; return x*y;")); + assertEquals((byte) 2 * (byte) 3, exec("byte x = 2; byte y = 3; return x*y;")); + assertEquals((byte) 5 * (byte) 10, exec("byte x = 5; byte y = 10; return x*y;")); + assertEquals((byte) 1 * (byte) 1 * (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return x*y*z;")); + assertEquals(((byte) 1 * (byte) 1) * (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return (x*y)*z;")); + assertEquals((byte) 1 * ((byte) 1 * (byte) 2), exec("byte x = 1; byte y = 1; byte z = 2; return x*(y*z);")); + assertEquals((byte) 10 * (byte) 0, exec("byte x = 10; byte y = 0; return x*y;")); + assertEquals((byte) 0 * (byte) 0, exec("byte x = 0; byte y = 0; return x*x;")); } public void testLong() throws Exception { - assertEquals(1L*1L, exec("long x = 1; long y = 1; return x*y;")); - assertEquals(2L*3L, exec("long x = 2; long y = 3; return x*y;")); - assertEquals(5L*10L, exec("long x = 5; long y = 10; return x*y;")); - assertEquals(1L*1L*2L, exec("long x = 1; long y = 1; int z = 2; return x*y*z;")); - assertEquals((1L*1L)*2L, exec("long x = 1; long y = 1; int z = 2; return (x*y)*z;")); - assertEquals(1L*(1L*2L), exec("long x = 1; long y = 1; int z = 2; return x*(y*z);")); - assertEquals(10L*0L, exec("long x = 10; long y = 0; return x*y;")); - assertEquals(0L*0L, exec("long x = 0; long y = 0; return x*x;")); + assertEquals(1L * 1L, exec("long x = 1; long y = 1; return x*y;")); + assertEquals(2L * 3L, exec("long x = 2; long y = 3; return x*y;")); + assertEquals(5L * 10L, exec("long x = 5; long y = 10; return x*y;")); + assertEquals(1L * 1L * 2L, exec("long x = 1; long y = 1; int z = 2; return x*y*z;")); + assertEquals((1L * 1L) * 2L, exec("long x = 1; long y = 1; int z = 2; return (x*y)*z;")); + assertEquals(1L * (1L * 2L), exec("long x = 1; long y = 1; int z = 2; return x*(y*z);")); + assertEquals(10L * 0L, exec("long x = 10; long y = 0; return x*y;")); + assertEquals(0L * 0L, exec("long x = 0; long y = 0; return x*x;")); } public void testLongConst() throws Exception { - assertEquals(1L*1L, exec("return 1L*1L;")); - assertEquals(2L*3L, exec("return 2L*3L;")); - assertEquals(5L*10L, exec("return 5L*10L;")); - assertEquals(1L*1L*2L, exec("return 1L*1L*2L;")); - assertEquals((1L*1L)*2L, exec("return (1L*1L)*2L;")); - assertEquals(1L*(1L*2L), exec("return 1L*(1L*2L);")); - assertEquals(10L*0L, exec("return 10L*0L;")); - assertEquals(0L*0L, exec("return 0L*0L;")); + assertEquals(1L * 1L, exec("return 1L*1L;")); + assertEquals(2L * 3L, exec("return 
2L*3L;")); + assertEquals(5L * 10L, exec("return 5L*10L;")); + assertEquals(1L * 1L * 2L, exec("return 1L*1L*2L;")); + assertEquals((1L * 1L) * 2L, exec("return (1L*1L)*2L;")); + assertEquals(1L * (1L * 2L), exec("return 1L*(1L*2L);")); + assertEquals(10L * 0L, exec("return 10L*0L;")); + assertEquals(0L * 0L, exec("return 0L*0L;")); } public void testFloat() throws Exception { - assertEquals(1F*1F, exec("float x = 1; float y = 1; return x*y;")); - assertEquals(2F*3F, exec("float x = 2; float y = 3; return x*y;")); - assertEquals(5F*10F, exec("float x = 5; float y = 10; return x*y;")); - assertEquals(1F*1F*2F, exec("float x = 1; float y = 1; float z = 2; return x*y*z;")); - assertEquals((1F*1F)*2F, exec("float x = 1; float y = 1; float z = 2; return (x*y)*z;")); - assertEquals(1F*(1F*2F), exec("float x = 1; float y = 1; float z = 2; return x*(y*z);")); - assertEquals(10F*0F, exec("float x = 10; float y = 0; return x*y;")); - assertEquals(0F*0F, exec("float x = 0; float y = 0; return x*x;")); + assertEquals(1F * 1F, exec("float x = 1; float y = 1; return x*y;")); + assertEquals(2F * 3F, exec("float x = 2; float y = 3; return x*y;")); + assertEquals(5F * 10F, exec("float x = 5; float y = 10; return x*y;")); + assertEquals(1F * 1F * 2F, exec("float x = 1; float y = 1; float z = 2; return x*y*z;")); + assertEquals((1F * 1F) * 2F, exec("float x = 1; float y = 1; float z = 2; return (x*y)*z;")); + assertEquals(1F * (1F * 2F), exec("float x = 1; float y = 1; float z = 2; return x*(y*z);")); + assertEquals(10F * 0F, exec("float x = 10; float y = 0; return x*y;")); + assertEquals(0F * 0F, exec("float x = 0; float y = 0; return x*x;")); } public void testFloatConst() throws Exception { - assertEquals(1F*1F, exec("return 1F*1F;")); - assertEquals(2F*3F, exec("return 2F*3F;")); - assertEquals(5F*10F, exec("return 5F*10F;")); - assertEquals(1F*1F*2F, exec("return 1F*1F*2F;")); - assertEquals((1F*1F)*2F, exec("return (1F*1F)*2F;")); - assertEquals(1F*(1F*2F), exec("return 1F*(1F*2F);")); - assertEquals(10F*0F, exec("return 10F*0F;")); - assertEquals(0F*0F, exec("return 0F*0F;")); + assertEquals(1F * 1F, exec("return 1F*1F;")); + assertEquals(2F * 3F, exec("return 2F*3F;")); + assertEquals(5F * 10F, exec("return 5F*10F;")); + assertEquals(1F * 1F * 2F, exec("return 1F*1F*2F;")); + assertEquals((1F * 1F) * 2F, exec("return (1F*1F)*2F;")); + assertEquals(1F * (1F * 2F), exec("return 1F*(1F*2F);")); + assertEquals(10F * 0F, exec("return 10F*0F;")); + assertEquals(0F * 0F, exec("return 0F*0F;")); } public void testDouble() throws Exception { - assertEquals(1D*1D, exec("double x = 1; double y = 1; return x*y;")); - assertEquals(2D*3D, exec("double x = 2; double y = 3; return x*y;")); - assertEquals(5D*10D, exec("double x = 5; double y = 10; return x*y;")); - assertEquals(1D*1D*2D, exec("double x = 1; double y = 1; double z = 2; return x*y*z;")); - assertEquals((1D*1D)*2D, exec("double x = 1; double y = 1; double z = 2; return (x*y)*z;")); - assertEquals(1D*(1D*2D), exec("double x = 1; double y = 1; double z = 2; return x*(y*z);")); - assertEquals(10D*0D, exec("double x = 10; float y = 0; return x*y;")); - assertEquals(0D*0D, exec("double x = 0; float y = 0; return x*x;")); + assertEquals(1D * 1D, exec("double x = 1; double y = 1; return x*y;")); + assertEquals(2D * 3D, exec("double x = 2; double y = 3; return x*y;")); + assertEquals(5D * 10D, exec("double x = 5; double y = 10; return x*y;")); + assertEquals(1D * 1D * 2D, exec("double x = 1; double y = 1; double z = 2; return x*y*z;")); + assertEquals((1D * 1D) 
* 2D, exec("double x = 1; double y = 1; double z = 2; return (x*y)*z;")); + assertEquals(1D * (1D * 2D), exec("double x = 1; double y = 1; double z = 2; return x*(y*z);")); + assertEquals(10D * 0D, exec("double x = 10; float y = 0; return x*y;")); + assertEquals(0D * 0D, exec("double x = 0; float y = 0; return x*x;")); } public void testDoubleConst() throws Exception { - assertEquals(1.0*1.0, exec("return 1.0*1.0;")); - assertEquals(2.0*3.0, exec("return 2.0*3.0;")); - assertEquals(5.0*10.0, exec("return 5.0*10.0;")); - assertEquals(1.0*1.0*2.0, exec("return 1.0*1.0*2.0;")); - assertEquals((1.0*1.0)*2.0, exec("return (1.0*1.0)*2.0;")); - assertEquals(1.0*(1.0*2.0), exec("return 1.0*(1.0*2.0);")); - assertEquals(10.0*0.0, exec("return 10.0*0.0;")); - assertEquals(0.0*0.0, exec("return 0.0*0.0;")); + assertEquals(1.0 * 1.0, exec("return 1.0*1.0;")); + assertEquals(2.0 * 3.0, exec("return 2.0*3.0;")); + assertEquals(5.0 * 10.0, exec("return 5.0*10.0;")); + assertEquals(1.0 * 1.0 * 2.0, exec("return 1.0*1.0*2.0;")); + assertEquals((1.0 * 1.0) * 2.0, exec("return (1.0*1.0)*2.0;")); + assertEquals(1.0 * (1.0 * 2.0), exec("return 1.0*(1.0*2.0);")); + assertEquals(10.0 * 0.0, exec("return 10.0*0.0;")); + assertEquals(0.0 * 0.0, exec("return 0.0*0.0;")); } public void testDef() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/NoSemiColonTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/NoSemiColonTests.java index 06982d2fac1..f22709bdfd6 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/NoSemiColonTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/NoSemiColonTests.java @@ -39,9 +39,9 @@ import java.util.Map; public class NoSemiColonTests extends ScriptTestCase { public void testDeclarationStatement() { - assertEquals((byte)2, exec("byte a = 2; return a")); - assertEquals((short)2, exec("short a = 2; return a")); - assertEquals((char)2, exec("char a = 2; return a")); + assertEquals((byte) 2, exec("byte a = 2; return a")); + assertEquals((short) 2, exec("short a = 2; return a")); + assertEquals((char) 2, exec("char a = 2; return a")); assertEquals(2, exec("int a = 2; return a")); assertEquals(2L, exec("long a = 2; return a")); assertEquals(2F, exec("float a = 2; return a")); @@ -85,7 +85,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(10, exec("return 10")); assertEquals(5, exec("int x = 5; return x")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1]")); - assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s"))[1]); - assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s")).get("x")); + assertEquals(5, ((short[]) exec("short[] s = new short[3]; s[1] = 5; return s"))[1]); + assertEquals(10, ((Map) exec("Map s = new HashMap(); s.put(\"x\", 10); return s")).get("x")); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/OrTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/OrTests.java index ccb10f968a2..b9995afd36e 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/OrTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/OrTests.java @@ -67,21 +67,13 @@ public class OrTests extends ScriptTestCase { } public void testIllegal() throws Exception { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; int y = 1; return x | y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x 
= (double)4; int y = 1; return x | y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; int y = 1; return x | y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; int y = 1; return x | y"); }); } public void testDef() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; def y = (byte)1; return x | y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; def y = (byte)1; return x | y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; def y = (byte)1; return x | y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; def y = (byte)1; return x | y"); }); assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x | y")); assertEquals(5, exec("def x = (short)4; def y = (byte)1; return x | y")); assertEquals(5, exec("def x = (char)4; def y = (byte)1; return x | y")); @@ -118,19 +110,15 @@ public class OrTests extends ScriptTestCase { assertEquals(5, exec("def x = (int)4; def y = (int)1; return x | y")); assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x | y")); - assertEquals(true, exec("def x = true; def y = true; return x | y")); - assertEquals(true, exec("def x = true; def y = false; return x | y")); - assertEquals(true, exec("def x = false; def y = true; return x | y")); + assertEquals(true, exec("def x = true; def y = true; return x | y")); + assertEquals(true, exec("def x = true; def y = false; return x | y")); + assertEquals(true, exec("def x = false; def y = true; return x | y")); assertEquals(false, exec("def x = false; def y = false; return x | y")); } public void testDefTypedLHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; def y = (byte)1; return x | y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = (double)4; def y = (byte)1; return x | y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; def y = (byte)1; return x | y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; def y = (byte)1; return x | y"); }); assertEquals(5, exec("byte x = (byte)4; def y = (byte)1; return x | y")); assertEquals(5, exec("short x = (short)4; def y = (byte)1; return x | y")); assertEquals(5, exec("char x = (char)4; def y = (byte)1; return x | y")); @@ -167,19 +155,15 @@ public class OrTests extends ScriptTestCase { assertEquals(5, exec("int x = (int)4; def y = (int)1; return x | y")); assertEquals(5L, exec("long x = (long)4; def y = (long)1; return x | y")); - assertEquals(true, exec("boolean x = true; def y = true; return x | y")); - assertEquals(true, exec("boolean x = true; def y = false; return x | y")); - assertEquals(true, exec("boolean x = false; def y = true; return x | y")); + assertEquals(true, exec("boolean x = true; def y = true; return x | y")); + assertEquals(true, exec("boolean x = true; def y = false; return x | y")); + assertEquals(true, exec("boolean x = false; def y = true; return x | y")); assertEquals(false, exec("boolean x = false; def y = false; return x | y")); } public void testDefTypedRHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; byte y = (byte)1; return x | y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; byte y = (byte)1; return x | y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 
(float)4; byte y = (byte)1; return x | y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; byte y = (byte)1; return x | y"); }); assertEquals(5, exec("def x = (byte)4; byte y = (byte)1; return x | y")); assertEquals(5, exec("def x = (short)4; byte y = (byte)1; return x | y")); assertEquals(5, exec("def x = (char)4; byte y = (byte)1; return x | y")); @@ -216,9 +200,9 @@ public class OrTests extends ScriptTestCase { assertEquals(5, exec("def x = (int)4; int y = (int)1; return x | y")); assertEquals(5L, exec("def x = (long)4; long y = (long)1; return x | y")); - assertEquals(true, exec("def x = true; boolean y = true; return x | y")); - assertEquals(true, exec("def x = true; boolean y = false; return x | y")); - assertEquals(true, exec("def x = false; boolean y = true; return x | y")); + assertEquals(true, exec("def x = true; boolean y = true; return x | y")); + assertEquals(true, exec("def x = true; boolean y = false; return x | y")); + assertEquals(true, exec("def x = false; boolean y = true; return x | y")); assertEquals(false, exec("def x = false; boolean y = false; return x | y")); } @@ -246,18 +230,10 @@ public class OrTests extends ScriptTestCase { } public void testBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = 4; int y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = 4; int y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; float y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; double y = 1; x |= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 4; int y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 4; int y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; float y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; double y = 1; x |= y"); }); } public void testDefCompoundAssignment() { @@ -284,17 +260,9 @@ public class OrTests extends ScriptTestCase { } public void testDefBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4F; int y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4D; int y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4; float y = 1; x |= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4; double y = 1; x |= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4F; int y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4D; int y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4; float y = 1; x |= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4; double y = 1; x |= y"); }); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/OverloadTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/OverloadTests.java index 75988dcfb10..16f3c16a595 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/OverloadTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/OverloadTests.java @@ -36,28 +36,40 @@ package org.opensearch.painless; public class OverloadTests extends ScriptTestCase { public void testMethod() { - 
//assertEquals(2, exec("return 'abc123abc'.indexOf('c');")); - //assertEquals(8, exec("return 'abc123abc'.indexOf('c', 3);")); - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("return 'abc123abc'.indexOf('c', 3, 'bogus');"); - }); + // assertEquals(2, exec("return 'abc123abc'.indexOf('c');")); + // assertEquals(8, exec("return 'abc123abc'.indexOf('c', 3);")); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("return 'abc123abc'.indexOf('c', 3, 'bogus');"); } + ); assertTrue(expected.getMessage().contains("[java.lang.String, indexOf/3]")); } public void testMethodDynamic() { assertEquals(2, exec("def x = 'abc123abc'; return x.indexOf('c');")); assertEquals(8, exec("def x = 'abc123abc'; return x.indexOf('c', 3);")); - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def x = 'abc123abc'; return x.indexOf('c', 3, 'bogus');"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def x = 'abc123abc'; return x.indexOf('c', 3, 'bogus');"); } + ); assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, indexOf/3] not found")); } public void testConstructor() { - assertEquals(true, exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject();" + - "return f.x == 0 && f.y == 0;")); - assertEquals(true, exec("org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(1, 2);" + - "return f.x == 1 && f.y == 2;")); + assertEquals( + true, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject();" + + "return f.x == 0 && f.y == 0;" + ) + ); + assertEquals( + true, + exec( + "org.opensearch.painless.FeatureTestObject f = new org.opensearch.painless.FeatureTestObject(1, 2);" + + "return f.x == 1 && f.y == 2;" + ) + ); } public void testStatic() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/PostfixTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/PostfixTests.java index da1f137cc9a..715d0b7b4cc 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/PostfixTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/PostfixTests.java @@ -43,11 +43,14 @@ public class PostfixTests extends ScriptTestCase { public void testConditionalPostfixes() { assertEquals("5", exec("boolean b = false; (b ? 4 : 5).toString()")); - assertEquals(3, exec( - "Map x = new HashMap(); x['test'] = 3;" + - "Map y = new HashMap(); y['test'] = 4;" + - "boolean b = true;" + - "return (int)(b ? x : y).get('test')") + assertEquals( + 3, + exec( + "Map x = new HashMap(); x['test'] = 3;" + + "Map y = new HashMap(); y['test'] = 4;" + + "boolean b = true;" + + "return (int)(b ? x : y).get('test')" + ) ); } @@ -60,11 +63,14 @@ public class PostfixTests extends ScriptTestCase { public void testDefConditionalPostfixes() { assertEquals("5", exec("def b = false; (b ? 4 : 5).toString()")); - assertEquals(3, exec( - "def x = new HashMap(); x['test'] = 3;" + - "def y = new HashMap(); y['test'] = 4;" + - "boolean b = true;" + - "return (b ? x : y).get('test')") + assertEquals( + 3, + exec( + "def x = new HashMap(); x['test'] = 3;" + + "def y = new HashMap(); y['test'] = 4;" + + "boolean b = true;" + + "return (b ? 
x : y).get('test')" + ) ); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/PromotionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/PromotionTests.java index 3febabdbe16..e9694877a6d 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/PromotionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/PromotionTests.java @@ -35,56 +35,56 @@ package org.opensearch.painless; public class PromotionTests extends ScriptTestCase { public void testBinaryPromotion() throws Exception { // byte/byte - assertEquals((byte)1 + (byte)1, exec("byte x = 1; byte y = 1; return x+y;")); + assertEquals((byte) 1 + (byte) 1, exec("byte x = 1; byte y = 1; return x+y;")); // byte/char - assertEquals((byte)1 + (char)1, exec("byte x = 1; char y = 1; return x+y;")); + assertEquals((byte) 1 + (char) 1, exec("byte x = 1; char y = 1; return x+y;")); // byte/short - assertEquals((byte)1 + (short)1, exec("byte x = 1; short y = 1; return x+y;")); + assertEquals((byte) 1 + (short) 1, exec("byte x = 1; short y = 1; return x+y;")); // byte/int - assertEquals((byte)1 + 1, exec("byte x = 1; int y = 1; return x+y;")); + assertEquals((byte) 1 + 1, exec("byte x = 1; int y = 1; return x+y;")); // byte/long - assertEquals((byte)1 + 1L, exec("byte x = 1; long y = 1; return x+y;")); + assertEquals((byte) 1 + 1L, exec("byte x = 1; long y = 1; return x+y;")); // byte/float - assertEquals((byte)1 + 1F, exec("byte x = 1; float y = 1; return x+y;")); + assertEquals((byte) 1 + 1F, exec("byte x = 1; float y = 1; return x+y;")); // byte/double - assertEquals((byte)1 + 1.0, exec("byte x = 1; double y = 1; return x+y;")); + assertEquals((byte) 1 + 1.0, exec("byte x = 1; double y = 1; return x+y;")); // char/byte - assertEquals((char)1 + (byte)1, exec("char x = 1; byte y = 1; return x+y;")); + assertEquals((char) 1 + (byte) 1, exec("char x = 1; byte y = 1; return x+y;")); // char/char - assertEquals((char)1 + (char)1, exec("char x = 1; char y = 1; return x+y;")); + assertEquals((char) 1 + (char) 1, exec("char x = 1; char y = 1; return x+y;")); // char/short - assertEquals((char)1 + (short)1, exec("char x = 1; short y = 1; return x+y;")); + assertEquals((char) 1 + (short) 1, exec("char x = 1; short y = 1; return x+y;")); // char/int - assertEquals((char)1 + 1, exec("char x = 1; int y = 1; return x+y;")); + assertEquals((char) 1 + 1, exec("char x = 1; int y = 1; return x+y;")); // char/long - assertEquals((char)1 + 1L, exec("char x = 1; long y = 1; return x+y;")); + assertEquals((char) 1 + 1L, exec("char x = 1; long y = 1; return x+y;")); // char/float - assertEquals((char)1 + 1F, exec("char x = 1; float y = 1; return x+y;")); + assertEquals((char) 1 + 1F, exec("char x = 1; float y = 1; return x+y;")); // char/double - assertEquals((char)1 + 1.0, exec("char x = 1; double y = 1; return x+y;")); + assertEquals((char) 1 + 1.0, exec("char x = 1; double y = 1; return x+y;")); // short/byte - assertEquals((short)1 + (byte)1, exec("short x = 1; byte y = 1; return x+y;")); + assertEquals((short) 1 + (byte) 1, exec("short x = 1; byte y = 1; return x+y;")); // short/char - assertEquals((short)1 + (char)1, exec("short x = 1; char y = 1; return x+y;")); + assertEquals((short) 1 + (char) 1, exec("short x = 1; char y = 1; return x+y;")); // short/short - assertEquals((short)1 + (short)1, exec("short x = 1; short y = 1; return x+y;")); + assertEquals((short) 1 + (short) 1, exec("short x = 1; short y = 1; return x+y;")); // short/int - assertEquals((short)1 + 1, 
exec("short x = 1; int y = 1; return x+y;")); + assertEquals((short) 1 + 1, exec("short x = 1; int y = 1; return x+y;")); // short/long - assertEquals((short)1 + 1L, exec("short x = 1; long y = 1; return x+y;")); + assertEquals((short) 1 + 1L, exec("short x = 1; long y = 1; return x+y;")); // short/float - assertEquals((short)1 + 1F, exec("short x = 1; float y = 1; return x+y;")); + assertEquals((short) 1 + 1F, exec("short x = 1; float y = 1; return x+y;")); // short/double - assertEquals((short)1 + 1.0, exec("short x = 1; double y = 1; return x+y;")); + assertEquals((short) 1 + 1.0, exec("short x = 1; double y = 1; return x+y;")); // int/byte - assertEquals(1 + (byte)1, exec("int x = 1; byte y = 1; return x+y;")); + assertEquals(1 + (byte) 1, exec("int x = 1; byte y = 1; return x+y;")); // int/char - assertEquals(1 + (char)1, exec("int x = 1; char y = 1; return x+y;")); + assertEquals(1 + (char) 1, exec("int x = 1; char y = 1; return x+y;")); // int/short - assertEquals(1 + (short)1, exec("int x = 1; short y = 1; return x+y;")); + assertEquals(1 + (short) 1, exec("int x = 1; short y = 1; return x+y;")); // int/int assertEquals(1 + 1, exec("int x = 1; int y = 1; return x+y;")); // int/long @@ -95,11 +95,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1 + 1.0, exec("int x = 1; double y = 1; return x+y;")); // long/byte - assertEquals(1L + (byte)1, exec("long x = 1; byte y = 1; return x+y;")); + assertEquals(1L + (byte) 1, exec("long x = 1; byte y = 1; return x+y;")); // long/char - assertEquals(1L + (char)1, exec("long x = 1; char y = 1; return x+y;")); + assertEquals(1L + (char) 1, exec("long x = 1; char y = 1; return x+y;")); // long/short - assertEquals(1L + (short)1, exec("long x = 1; short y = 1; return x+y;")); + assertEquals(1L + (short) 1, exec("long x = 1; short y = 1; return x+y;")); // long/int assertEquals(1L + 1, exec("long x = 1; int y = 1; return x+y;")); // long/long @@ -110,11 +110,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1L + 1.0, exec("long x = 1; double y = 1; return x+y;")); // float/byte - assertEquals(1F + (byte)1, exec("float x = 1; byte y = 1; return x+y;")); + assertEquals(1F + (byte) 1, exec("float x = 1; byte y = 1; return x+y;")); // float/char - assertEquals(1F + (char)1, exec("float x = 1; char y = 1; return x+y;")); + assertEquals(1F + (char) 1, exec("float x = 1; char y = 1; return x+y;")); // float/short - assertEquals(1F + (short)1, exec("float x = 1; short y = 1; return x+y;")); + assertEquals(1F + (short) 1, exec("float x = 1; short y = 1; return x+y;")); // float/int assertEquals(1F + 1, exec("float x = 1; int y = 1; return x+y;")); // float/long @@ -125,11 +125,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1F + 1.0, exec("float x = 1; double y = 1; return x+y;")); // double/byte - assertEquals(1.0 + (byte)1, exec("double x = 1; byte y = 1; return x+y;")); + assertEquals(1.0 + (byte) 1, exec("double x = 1; byte y = 1; return x+y;")); // double/char - assertEquals(1.0 + (char)1, exec("double x = 1; char y = 1; return x+y;")); + assertEquals(1.0 + (char) 1, exec("double x = 1; char y = 1; return x+y;")); // double/short - assertEquals(1.0 + (short)1, exec("double x = 1; short y = 1; return x+y;")); + assertEquals(1.0 + (short) 1, exec("double x = 1; short y = 1; return x+y;")); // double/int assertEquals(1.0 + 1, exec("double x = 1; int y = 1; return x+y;")); // double/long @@ -142,56 +142,56 @@ public class PromotionTests extends ScriptTestCase { public void 
testBinaryPromotionConst() throws Exception { // byte/byte - assertEquals((byte)1 + (byte)1, exec("return (byte)1 + (byte)1;")); + assertEquals((byte) 1 + (byte) 1, exec("return (byte)1 + (byte)1;")); // byte/char - assertEquals((byte)1 + (char)1, exec("return (byte)1 + (char)1;")); + assertEquals((byte) 1 + (char) 1, exec("return (byte)1 + (char)1;")); // byte/short - assertEquals((byte)1 + (short)1, exec("return (byte)1 + (short)1;")); + assertEquals((byte) 1 + (short) 1, exec("return (byte)1 + (short)1;")); // byte/int - assertEquals((byte)1 + 1, exec("return (byte)1 + 1;")); + assertEquals((byte) 1 + 1, exec("return (byte)1 + 1;")); // byte/long - assertEquals((byte)1 + 1L, exec("return (byte)1 + 1L;")); + assertEquals((byte) 1 + 1L, exec("return (byte)1 + 1L;")); // byte/float - assertEquals((byte)1 + 1F, exec("return (byte)1 + 1F;")); + assertEquals((byte) 1 + 1F, exec("return (byte)1 + 1F;")); // byte/double - assertEquals((byte)1 + 1.0, exec("return (byte)1 + 1.0;")); + assertEquals((byte) 1 + 1.0, exec("return (byte)1 + 1.0;")); // char/byte - assertEquals((char)1 + (byte)1, exec("return (char)1 + (byte)1;")); + assertEquals((char) 1 + (byte) 1, exec("return (char)1 + (byte)1;")); // char/char - assertEquals((char)1 + (char)1, exec("return (char)1 + (char)1;")); + assertEquals((char) 1 + (char) 1, exec("return (char)1 + (char)1;")); // char/short - assertEquals((char)1 + (short)1, exec("return (char)1 + (short)1;")); + assertEquals((char) 1 + (short) 1, exec("return (char)1 + (short)1;")); // char/int - assertEquals((char)1 + 1, exec("return (char)1 + 1;")); + assertEquals((char) 1 + 1, exec("return (char)1 + 1;")); // char/long - assertEquals((char)1 + 1L, exec("return (char)1 + 1L;")); + assertEquals((char) 1 + 1L, exec("return (char)1 + 1L;")); // char/float - assertEquals((char)1 + 1F, exec("return (char)1 + 1F;")); + assertEquals((char) 1 + 1F, exec("return (char)1 + 1F;")); // char/double - assertEquals((char)1 + 1.0, exec("return (char)1 + 1.0;")); + assertEquals((char) 1 + 1.0, exec("return (char)1 + 1.0;")); // short/byte - assertEquals((short)1 + (byte)1, exec("return (short)1 + (byte)1;")); + assertEquals((short) 1 + (byte) 1, exec("return (short)1 + (byte)1;")); // short/char - assertEquals((short)1 + (char)1, exec("return (short)1 + (char)1;")); + assertEquals((short) 1 + (char) 1, exec("return (short)1 + (char)1;")); // short/short - assertEquals((short)1 + (short)1, exec("return (short)1 + (short)1;")); + assertEquals((short) 1 + (short) 1, exec("return (short)1 + (short)1;")); // short/int - assertEquals((short)1 + 1, exec("return (short)1 + 1;")); + assertEquals((short) 1 + 1, exec("return (short)1 + 1;")); // short/long - assertEquals((short)1 + 1L, exec("return (short)1 + 1L;")); + assertEquals((short) 1 + 1L, exec("return (short)1 + 1L;")); // short/float - assertEquals((short)1 + 1F, exec("return (short)1 + 1F;")); + assertEquals((short) 1 + 1F, exec("return (short)1 + 1F;")); // short/double - assertEquals((short)1 + 1.0, exec("return (short)1 + 1.0;")); + assertEquals((short) 1 + 1.0, exec("return (short)1 + 1.0;")); // int/byte - assertEquals(1 + (byte)1, exec("return 1 + (byte)1;")); + assertEquals(1 + (byte) 1, exec("return 1 + (byte)1;")); // int/char - assertEquals(1 + (char)1, exec("return 1 + (char)1;")); + assertEquals(1 + (char) 1, exec("return 1 + (char)1;")); // int/short - assertEquals(1 + (short)1, exec("return 1 + (short)1;")); + assertEquals(1 + (short) 1, exec("return 1 + (short)1;")); // int/int assertEquals(1 + 1, exec("return 1 + 1;")); // 
int/long @@ -202,11 +202,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1 + 1.0, exec("return 1 + 1.0;")); // long/byte - assertEquals(1L + (byte)1, exec("return 1L + (byte)1;")); + assertEquals(1L + (byte) 1, exec("return 1L + (byte)1;")); // long/char - assertEquals(1L + (char)1, exec("return 1L + (char)1;")); + assertEquals(1L + (char) 1, exec("return 1L + (char)1;")); // long/short - assertEquals(1L + (short)1, exec("return 1L + (short)1;")); + assertEquals(1L + (short) 1, exec("return 1L + (short)1;")); // long/int assertEquals(1L + 1, exec("return 1L + 1;")); // long/long @@ -217,11 +217,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1L + 1.0, exec("return 1L + 1.0;")); // float/byte - assertEquals(1F + (byte)1, exec("return 1F + (byte)1;")); + assertEquals(1F + (byte) 1, exec("return 1F + (byte)1;")); // float/char - assertEquals(1F + (char)1, exec("return 1F + (char)1;")); + assertEquals(1F + (char) 1, exec("return 1F + (char)1;")); // float/short - assertEquals(1F + (short)1, exec("return 1F + (short)1;")); + assertEquals(1F + (short) 1, exec("return 1F + (short)1;")); // float/int assertEquals(1F + 1, exec("return 1F + 1;")); // float/long @@ -232,11 +232,11 @@ public class PromotionTests extends ScriptTestCase { assertEquals(1F + 1.0, exec("return 1F + 1.0;")); // double/byte - assertEquals(1.0 + (byte)1, exec("return 1.0 + (byte)1;")); + assertEquals(1.0 + (byte) 1, exec("return 1.0 + (byte)1;")); // double/char - assertEquals(1.0 + (char)1, exec("return 1.0 + (char)1;")); + assertEquals(1.0 + (char) 1, exec("return 1.0 + (char)1;")); // double/short - assertEquals(1.0 + (short)1, exec("return 1.0 + (short)1;")); + assertEquals(1.0 + (short) 1, exec("return 1.0 + (short)1;")); // double/int assertEquals(1.0 + 1, exec("return 1.0 + 1;")); // double/long diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimit2Tests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimit2Tests.java index a85b64aeeb7..1d08fdac9c5 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimit2Tests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimit2Tests.java @@ -31,6 +31,7 @@ */ package org.opensearch.painless; + import org.junit.AfterClass; import org.junit.BeforeClass; import org.opensearch.common.settings.Settings; @@ -47,7 +48,7 @@ public class RegexLimit2Tests extends ScriptTestCase { Settings settings = Settings.builder().put(CompilerSettings.REGEX_LIMIT_FACTOR.getKey(), 2).build(); SCRIPT_ENGINE = new PainlessScriptEngine(settings, newDefaultContexts()); } - + @AfterClass public static void afterClass() { SCRIPT_ENGINE = null; @@ -57,124 +58,153 @@ public class RegexLimit2Tests extends ScriptTestCase { protected PainlessScriptEngine getEngine() { return SCRIPT_ENGINE; } - + public void testRegexInject_Matcher() { - String[] scripts = new String[]{PATTERN + ".matcher(" + CHAR_SEQUENCE + ").matches()", - "Matcher m = " + PATTERN + ".matcher(" + CHAR_SEQUENCE + "); m.matches()"}; + String[] scripts = new String[] { + PATTERN + ".matcher(" + CHAR_SEQUENCE + ").matches()", + "Matcher m = " + PATTERN + ".matcher(" + CHAR_SEQUENCE + "); m.matches()" }; for (String script : scripts) { assertEquals(Boolean.TRUE, exec(script)); } } public void testRegexInject_Def_Matcher() { - String[] scripts = new String[]{"def p = " + PATTERN + "; p.matcher(" + CHAR_SEQUENCE + ").matches()", - "def p = " + PATTERN + "; def m = p.matcher(" + CHAR_SEQUENCE + "); 
m.matches()"}; + String[] scripts = new String[] { + "def p = " + PATTERN + "; p.matcher(" + CHAR_SEQUENCE + ").matches()", + "def p = " + PATTERN + "; def m = p.matcher(" + CHAR_SEQUENCE + "); m.matches()" }; for (String script : scripts) { assertEquals(Boolean.TRUE, exec(script)); } } public void testMethodRegexInject_Ref_Matcher() { - String script = - "boolean isMatch(Function func) { func.apply(" + CHAR_SEQUENCE +").matches(); } " + - "Pattern pattern = " + PATTERN + ";" + - "isMatch(pattern::matcher)"; + String script = "boolean isMatch(Function func) { func.apply(" + + CHAR_SEQUENCE + + ").matches(); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "isMatch(pattern::matcher)"; assertEquals(Boolean.TRUE, exec(script)); } public void testRegexInject_DefMethodRef_Matcher() { - String script = - "boolean isMatch(Function func) { func.apply(" + CHAR_SEQUENCE +").matches(); } " + - "def pattern = " + PATTERN + ";" + - "isMatch(pattern::matcher)"; + String script = "boolean isMatch(Function func) { func.apply(" + + CHAR_SEQUENCE + + ").matches(); } " + + "def pattern = " + + PATTERN + + ";" + + "isMatch(pattern::matcher)"; assertEquals(Boolean.TRUE, exec(script)); } public void testRegexInject_SplitLimit() { - String[] scripts = new String[]{PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ", 2)", - "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ", 2)"}; + String[] scripts = new String[] { + PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ", 2)", + "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ", 2)" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-abc-2-def-Y-abc-3-def-Z-abc" }, (String[]) exec(script)); } } public void testRegexInject_Def_SplitLimit() { String script = "def p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ", 2)"; - assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-abc-2-def-Y-abc-3-def-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_Ref_SplitLimit() { - String script = - "String[] splitLimit(BiFunction func) { func.apply(" + SPLIT_CHAR_SEQUENCE + ", 2); } " + - "Pattern pattern = " + PATTERN + ";" + - "splitLimit(pattern::split)"; - assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[])exec(script)); + String script = "String[] splitLimit(BiFunction func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + ", 2); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "splitLimit(pattern::split)"; + assertArrayEquals(new String[] { "0-", "-X-abc-2-def-Y-abc-3-def-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_DefMethodRef_SplitLimit() { - String script = - "String[] splitLimit(BiFunction func) { func.apply(" + SPLIT_CHAR_SEQUENCE + ", 2); } " + - "def pattern = " + PATTERN + ";" + - "splitLimit(pattern::split)"; - assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[])exec(script)); + String script = "String[] splitLimit(BiFunction func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + ", 2); } " + + "def pattern = " + + PATTERN + + ";" + + "splitLimit(pattern::split)"; + assertArrayEquals(new String[] { "0-", "-X-abc-2-def-Y-abc-3-def-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_Split() { - String[] scripts = new String[]{PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ")", - "Pattern p = " + PATTERN + "; p.split(" + 
SPLIT_CHAR_SEQUENCE + ")"}; + String[] scripts = new String[] { + PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ")", + "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ")" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } } public void testRegexInject_Def_Split() { String script = "def p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ")"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_Ref_Split() { - String script = - "String[] split(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE + "); } " + - "Pattern pattern = " + PATTERN + ";" + - "split(pattern::split)"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[])exec(script)); + String script = "String[] split(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "split(pattern::split)"; + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_DefMethodRef_Split() { - String script = - "String[] split(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "def pattern = " + PATTERN + ";" + - "split(pattern::split)"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[])exec(script)); + String script = "String[] split(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "def pattern = " + + PATTERN + + ";" + + "split(pattern::split)"; + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_SplitAsStream() { - String[] scripts = new String[]{PATTERN + ".splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)", - "Pattern p = " + PATTERN + "; p.splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)"}; + String[] scripts = new String[] { + PATTERN + ".splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)", + "Pattern p = " + PATTERN + "; p.splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } } public void testRegexInject_Def_SplitAsStream() { String script = "def p = " + PATTERN + "; p.splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } public void testRegexInject_Ref_SplitAsStream() { - String script = - "Stream splitStream(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "Pattern pattern = " + PATTERN + ";" + - "splitStream(pattern::splitAsStream).toArray(String[]::new)"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(script)); + String script = "Stream splitStream(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "splitStream(pattern::splitAsStream).toArray(String[]::new)"; + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) 
exec(script)); } public void testRegexInject_DefMethodRef_SplitAsStream() { - String script = - "Stream splitStream(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "def pattern = " + PATTERN + ";" + - "splitStream(pattern::splitAsStream).toArray(String[]::new)"; - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(script)); + String script = "Stream splitStream(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "def pattern = " + + PATTERN + + ";" + + "splitStream(pattern::splitAsStream).toArray(String[]::new)"; + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } public void testRegexInjectFindOperator() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java index b41f940a888..c3233bc0d92 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java @@ -31,6 +31,7 @@ */ package org.opensearch.painless; + import org.junit.AfterClass; import org.junit.BeforeClass; import org.opensearch.common.breaker.CircuitBreakingException; @@ -63,8 +64,9 @@ public class RegexLimitTests extends ScriptTestCase { } public void testRegexInject_Matcher() { - String[] scripts = new String[]{PATTERN + ".matcher(" + CHAR_SEQUENCE + ").matches()", - "Matcher m = " + PATTERN + ".matcher(" + CHAR_SEQUENCE + "); m.matches()"}; + String[] scripts = new String[] { + PATTERN + ".matcher(" + CHAR_SEQUENCE + ").matches()", + "Matcher m = " + PATTERN + ".matcher(" + CHAR_SEQUENCE + "); m.matches()" }; for (String script : scripts) { // Backtracking means the regular expression will fail with limit factor 1 (don't consider more than each char once) CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); @@ -73,8 +75,9 @@ public class RegexLimitTests extends ScriptTestCase { } public void testRegexInject_Def_Matcher() { - String[] scripts = new String[]{"def p = " + PATTERN + "; p.matcher(" + CHAR_SEQUENCE + ").matches()", - "def p = " + PATTERN + "; def m = p.matcher(" + CHAR_SEQUENCE + "); m.matches()"}; + String[] scripts = new String[] { + "def p = " + PATTERN + "; p.matcher(" + CHAR_SEQUENCE + ").matches()", + "def p = " + PATTERN + "; def m = p.matcher(" + CHAR_SEQUENCE + "); m.matches()" }; for (String script : scripts) { CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); @@ -82,26 +85,33 @@ public class RegexLimitTests extends ScriptTestCase { } public void testMethodRegexInject_Ref_Matcher() { - String script = - "boolean isMatch(Function func) { func.apply(" + CHAR_SEQUENCE +").matches(); } " + - "Pattern pattern = " + PATTERN + ";" + - "isMatch(pattern::matcher)"; + String script = "boolean isMatch(Function func) { func.apply(" + + CHAR_SEQUENCE + + ").matches(); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "isMatch(pattern::matcher)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_DefMethodRef_Matcher() { - String script = - "boolean isMatch(Function func) { func.apply(" + CHAR_SEQUENCE +").matches(); } " + - "def pattern = " + PATTERN + ";" + - 
"isMatch(pattern::matcher)"; + String script = "boolean isMatch(Function func) { func.apply(" + + CHAR_SEQUENCE + + ").matches(); } " + + "def pattern = " + + PATTERN + + ";" + + "isMatch(pattern::matcher)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_SplitLimit() { - String[] scripts = new String[]{PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ", 2)", - "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ", 2)"}; + String[] scripts = new String[] { + PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ", 2)", + "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ", 2)" }; for (String script : scripts) { CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); @@ -115,26 +125,33 @@ public class RegexLimitTests extends ScriptTestCase { } public void testRegexInject_Ref_SplitLimit() { - String script = - "String[] splitLimit(BiFunction func) { func.apply(" + SPLIT_CHAR_SEQUENCE + ", 2); } " + - "Pattern pattern = " + PATTERN + ";" + - "splitLimit(pattern::split)"; + String script = "String[] splitLimit(BiFunction func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + ", 2); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "splitLimit(pattern::split)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_DefMethodRef_SplitLimit() { - String script = - "String[] splitLimit(BiFunction func) { func.apply(" + SPLIT_CHAR_SEQUENCE + ", 2); } " + - "def pattern = " + PATTERN + ";" + - "splitLimit(pattern::split)"; + String script = "String[] splitLimit(BiFunction func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + ", 2); } " + + "def pattern = " + + PATTERN + + ";" + + "splitLimit(pattern::split)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_Split() { - String[] scripts = new String[]{PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ")", - "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ")"}; + String[] scripts = new String[] { + PATTERN + ".split(" + SPLIT_CHAR_SEQUENCE + ")", + "Pattern p = " + PATTERN + "; p.split(" + SPLIT_CHAR_SEQUENCE + ")" }; for (String script : scripts) { CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); @@ -148,26 +165,33 @@ public class RegexLimitTests extends ScriptTestCase { } public void testRegexInject_Ref_Split() { - String script = - "String[] split(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE + "); } " + - "Pattern pattern = " + PATTERN + ";" + - "split(pattern::split)"; + String script = "String[] split(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "split(pattern::split)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_DefMethodRef_Split() { - String script = - "String[] split(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "def pattern = " + PATTERN + 
";" + - "split(pattern::split)"; + String script = "String[] split(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "def pattern = " + + PATTERN + + ";" + + "split(pattern::split)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_SplitAsStream() { - String[] scripts = new String[]{PATTERN + ".splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)", - "Pattern p = " + PATTERN + "; p.splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)"}; + String[] scripts = new String[] { + PATTERN + ".splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)", + "Pattern p = " + PATTERN + "; p.splitAsStream(" + SPLIT_CHAR_SEQUENCE + ").toArray(String[]::new)" }; for (String script : scripts) { CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); @@ -181,19 +205,25 @@ public class RegexLimitTests extends ScriptTestCase { } public void testRegexInject_Ref_SplitAsStream() { - String script = - "Stream splitStream(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "Pattern pattern = " + PATTERN + ";" + - "splitStream(pattern::splitAsStream).toArray(String[]::new)"; + String script = "Stream splitStream(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "Pattern pattern = " + + PATTERN + + ";" + + "splitStream(pattern::splitAsStream).toArray(String[]::new)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } public void testRegexInject_DefMethodRef_SplitAsStream() { - String script = - "Stream splitStream(Function func) { func.apply(" + SPLIT_CHAR_SEQUENCE +"); } " + - "def pattern = " + PATTERN + ";" + - "splitStream(pattern::splitAsStream).toArray(String[]::new)"; + String script = "Stream splitStream(Function func) { func.apply(" + + SPLIT_CHAR_SEQUENCE + + "); } " + + "def pattern = " + + PATTERN + + ";" + + "splitStream(pattern::splitAsStream).toArray(String[]::new)"; CircuitBreakingException cbe = expectScriptThrows(CircuitBreakingException.class, () -> exec(script)); assertTrue(cbe.getMessage().contains(REGEX_CIRCUIT_MESSAGE)); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java index d571c8f4f8c..cb8296a3f23 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexTests.java @@ -49,9 +49,7 @@ public class RegexTests extends ScriptTestCase { @BeforeClass public static void beforeClass() { - Settings settings = Settings.builder() - .put(CompilerSettings.REGEX_ENABLED.getKey(), true) - .build(); + Settings settings = Settings.builder().put(CompilerSettings.REGEX_ENABLED.getKey(), true).build(); SCRIPT_ENGINE = new PainlessScriptEngine(settings, newDefaultContexts()); } @@ -104,19 +102,19 @@ public class RegexTests extends ScriptTestCase { assertEquals(true, exec("return !/foo/.matcher('bar').matches()")); } - public void testInTernaryCondition() { + public void testInTernaryCondition() { assertEquals(true, exec("return /foo/.matcher('foo').matches() ? true : false")); assertEquals(1, exec("def i = 0; i += /foo/.matcher('foo').matches() ? 
1 : 1; return i")); assertEquals(true, exec("return 'foo' ==~ /foo/ ? true : false")); assertEquals(1, exec("def i = 0; i += 'foo' ==~ /foo/ ? 1 : 1; return i")); } - public void testInTernaryTrueArm() { + public void testInTernaryTrueArm() { assertEquals(true, exec("def i = true; return i ? /foo/.matcher('foo').matches() : false")); assertEquals(true, exec("def i = true; return i ? 'foo' ==~ /foo/ : false")); } - public void testInTernaryFalseArm() { + public void testInTernaryFalseArm() { assertEquals(true, exec("def i = false; return i ? false : 'foo' ==~ /foo/")); } @@ -176,7 +174,7 @@ public class RegexTests extends ScriptTestCase { // Make sure some methods on Pattern are whitelisted public void testSplit() { - assertArrayEquals(new String[] {"cat", "dog"}, (String[]) exec("/,/.split('cat,dog')")); + assertArrayEquals(new String[] { "cat", "dog" }, (String[]) exec("/,/.split('cat,dog')")); } public void testSplitAsStream() { @@ -226,8 +224,10 @@ public class RegexTests extends ScriptTestCase { public void testReplaceAllMatchesCharSequence() { CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); - assertEquals("thE qUIck brOwn fOx", - exec("params.a.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); + assertEquals( + "thE qUIck brOwn fOx", + exec("params.a.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true) + ); } public void testReplaceAllNoMatchString() { @@ -236,26 +236,33 @@ public class RegexTests extends ScriptTestCase { public void testReplaceAllNoMatchCharSequence() { CharSequence charSequence = CharBuffer.wrap("i am cat"); - assertEquals("i am cat", - exec("params.a.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); + assertEquals( + "i am cat", + exec("params.a.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true) + ); } public void testReplaceAllQuoteReplacement() { - assertEquals("th/E q/U/Ick br/Own f/Ox", - exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))")); - assertEquals("th$E q$U$Ick br$Own f$Ox", - exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))")); + assertEquals( + "th/E q/U/Ick br/Own f/Ox", + exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))") + ); + assertEquals( + "th$E q$U$Ick br$Own f$Ox", + exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))") + ); } public void testReplaceFirstMatchesString() { - assertEquals("thE quick brown fox", - exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))")); + assertEquals("thE quick brown fox", exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))")); } public void testReplaceFirstMatchesCharSequence() { CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); - assertEquals("thE quick brown fox", - exec("params.a.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); + assertEquals( + "thE quick brown fox", + exec("params.a.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true) + ); } public void testReplaceFirstNoMatchString() { @@ -264,21 +271,25 @@ public class RegexTests extends ScriptTestCase { public void 
testReplaceFirstNoMatchCharSequence() { CharSequence charSequence = CharBuffer.wrap("i am cat"); - assertEquals("i am cat", - exec("params.a.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true)); + assertEquals( + "i am cat", + exec("params.a.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence), true) + ); } public void testReplaceFirstQuoteReplacement() { - assertEquals("th/E quick brown fox", - exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))")); - assertEquals("th$E quick brown fox", - exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))")); + assertEquals( + "th/E quick brown fox", + exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))") + ); + assertEquals( + "th$E quick brown fox", + exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))") + ); } public void testCantUsePatternCompile() { - IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("Pattern.compile('aa')"); - }); + IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Pattern.compile('aa')"); }); assertTrue(e.getMessage().contains("[java.util.regex.Pattern, compile/1]")); } @@ -289,15 +300,11 @@ public class RegexTests extends ScriptTestCase { assertEquals("invalid regular expression: could not compile regex constant [\\ujjjj] with flags []", e.getCause().getMessage()); // And make sure the location of the error points to the offset inside the pattern - assertScriptStack(e, - "/\\ujjjj/", - " ^---- HERE"); + assertScriptStack(e, "/\\ujjjj/", " ^---- HERE"); } public void testRegexAgainstNumber() { - ClassCastException e = expectScriptThrows(ClassCastException.class, () -> { - exec("12 ==~ /cat/"); - }); + ClassCastException e = expectScriptThrows(ClassCastException.class, () -> { exec("12 ==~ /cat/"); }); assertEquals("Cannot cast from [int] to [java.lang.String].", e.getMessage()); } @@ -314,34 +321,38 @@ public class RegexTests extends ScriptTestCase { private final String splitCharSequence = "'0-abc-1-def-X-abc-2-def-Y-abc-3-def-Z-abc'"; public void testRegexInjectUnlimited_Matcher() { - String[] scripts = new String[]{pattern + ".matcher(" + charSequence + ").matches()", - "Matcher m = " + pattern + ".matcher(" + charSequence + "); m.matches()"}; + String[] scripts = new String[] { + pattern + ".matcher(" + charSequence + ").matches()", + "Matcher m = " + pattern + ".matcher(" + charSequence + "); m.matches()" }; for (String script : scripts) { assertEquals(Boolean.TRUE, exec(script)); } } public void testRegexInjectUnlimited_SplitLimit() { - String[] scripts = new String[]{pattern + ".split(" + splitCharSequence + ", 2)", - "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ", 2)"}; + String[] scripts = new String[] { + pattern + ".split(" + splitCharSequence + ", 2)", + "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ", 2)" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-abc-2-def-Y-abc-3-def-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-abc-2-def-Y-abc-3-def-Z-abc" }, (String[]) exec(script)); } } public void testRegexInjectUnlimited_Split() { - String[] scripts = new String[]{pattern + ".split(" + splitCharSequence + ")", - "Pattern p = " + pattern + "; p.split(" + 
splitCharSequence + ")"}; + String[] scripts = new String[] { + pattern + ".split(" + splitCharSequence + ")", + "Pattern p = " + pattern + "; p.split(" + splitCharSequence + ")" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[])exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } } public void testRegexInjectUnlimited_SplitAsStream() { - String[] scripts = new String[]{pattern + ".splitAsStream(" + splitCharSequence + ").toArray(String[]::new)", - "Pattern p = " + pattern + "; p.splitAsStream(" + splitCharSequence + ").toArray(String[]::new)"}; + String[] scripts = new String[] { + pattern + ".splitAsStream(" + splitCharSequence + ").toArray(String[]::new)", + "Pattern p = " + pattern + "; p.splitAsStream(" + splitCharSequence + ").toArray(String[]::new)" }; for (String script : scripts) { - assertArrayEquals(new String[]{"0-", "-X-", "-Y-", "-Z-abc"}, (String[]) exec(script)); + assertArrayEquals(new String[] { "0-", "-X-", "-Y-", "-Z-abc" }, (String[]) exec(script)); } } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RemainderTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RemainderTests.java index cf39a3eace4..4ea87819493 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RemainderTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RemainderTests.java @@ -33,7 +33,7 @@ package org.opensearch.painless; /** Tests for division operator across all types */ -//TODO: NaN/Inf/overflow/... +// TODO: NaN/Inf/overflow/... public class RemainderTests extends ScriptTestCase { // TODO: byte,short,char @@ -44,111 +44,103 @@ public class RemainderTests extends ScriptTestCase { } public void testInt() throws Exception { - assertEquals(1%1, exec("int x = 1; int y = 1; return x%y;")); - assertEquals(2%3, exec("int x = 2; int y = 3; return x%y;")); - assertEquals(5%10, exec("int x = 5; int y = 10; return x%y;")); - assertEquals(10%1%2, exec("int x = 10; int y = 1; int z = 2; return x%y%z;")); - assertEquals((10%1)%2, exec("int x = 10; int y = 1; int z = 2; return (x%y)%z;")); - assertEquals(10%(4%3), exec("int x = 10; int y = 4; int z = 3; return x%(y%z);")); - assertEquals(10%1, exec("int x = 10; int y = 1; return x%y;")); - assertEquals(0%1, exec("int x = 0; int y = 1; return x%y;")); + assertEquals(1 % 1, exec("int x = 1; int y = 1; return x%y;")); + assertEquals(2 % 3, exec("int x = 2; int y = 3; return x%y;")); + assertEquals(5 % 10, exec("int x = 5; int y = 10; return x%y;")); + assertEquals(10 % 1 % 2, exec("int x = 10; int y = 1; int z = 2; return x%y%z;")); + assertEquals((10 % 1) % 2, exec("int x = 10; int y = 1; int z = 2; return (x%y)%z;")); + assertEquals(10 % (4 % 3), exec("int x = 10; int y = 4; int z = 3; return x%(y%z);")); + assertEquals(10 % 1, exec("int x = 10; int y = 1; return x%y;")); + assertEquals(0 % 1, exec("int x = 0; int y = 1; return x%y;")); } public void testIntConst() throws Exception { - assertEquals(1%1, exec("return 1%1;")); - assertEquals(2%3, exec("return 2%3;")); - assertEquals(5%10, exec("return 5%10;")); - assertEquals(10%1%2, exec("return 10%1%2;")); - assertEquals((10%1)%2, exec("return (10%1)%2;")); - assertEquals(10%(4%3), exec("return 10%(4%3);")); - assertEquals(10%1, exec("return 10%1;")); - assertEquals(0%1, exec("return 0%1;")); + assertEquals(1 % 1, exec("return 1%1;")); + assertEquals(2 % 3, exec("return 2%3;")); + assertEquals(5 % 10, 
exec("return 5%10;")); + assertEquals(10 % 1 % 2, exec("return 10%1%2;")); + assertEquals((10 % 1) % 2, exec("return (10%1)%2;")); + assertEquals(10 % (4 % 3), exec("return 10%(4%3);")); + assertEquals(10 % 1, exec("return 10%1;")); + assertEquals(0 % 1, exec("return 0%1;")); } public void testLong() throws Exception { - assertEquals(1L%1L, exec("long x = 1; long y = 1; return x%y;")); - assertEquals(2L%3L, exec("long x = 2; long y = 3; return x%y;")); - assertEquals(5L%10L, exec("long x = 5; long y = 10; return x%y;")); - assertEquals(10L%1L%2L, exec("long x = 10; long y = 1; long z = 2; return x%y%z;")); - assertEquals((10L%1L)%2L, exec("long x = 10; long y = 1; long z = 2; return (x%y)%z;")); - assertEquals(10L%(4L%3L), exec("long x = 10; long y = 4; long z = 3; return x%(y%z);")); - assertEquals(10L%1L, exec("long x = 10; long y = 1; return x%y;")); - assertEquals(0L%1L, exec("long x = 0; long y = 1; return x%y;")); + assertEquals(1L % 1L, exec("long x = 1; long y = 1; return x%y;")); + assertEquals(2L % 3L, exec("long x = 2; long y = 3; return x%y;")); + assertEquals(5L % 10L, exec("long x = 5; long y = 10; return x%y;")); + assertEquals(10L % 1L % 2L, exec("long x = 10; long y = 1; long z = 2; return x%y%z;")); + assertEquals((10L % 1L) % 2L, exec("long x = 10; long y = 1; long z = 2; return (x%y)%z;")); + assertEquals(10L % (4L % 3L), exec("long x = 10; long y = 4; long z = 3; return x%(y%z);")); + assertEquals(10L % 1L, exec("long x = 10; long y = 1; return x%y;")); + assertEquals(0L % 1L, exec("long x = 0; long y = 1; return x%y;")); } public void testLongConst() throws Exception { - assertEquals(1L%1L, exec("return 1L%1L;")); - assertEquals(2L%3L, exec("return 2L%3L;")); - assertEquals(5L%10L, exec("return 5L%10L;")); - assertEquals(10L%1L%2L, exec("return 10L%1L%2L;")); - assertEquals((10L%1L)%2L, exec("return (10L%1L)%2L;")); - assertEquals(10L%(4L%3L), exec("return 10L%(4L%3L);")); - assertEquals(10L%1L, exec("return 10L%1L;")); - assertEquals(0L%1L, exec("return 0L%1L;")); + assertEquals(1L % 1L, exec("return 1L%1L;")); + assertEquals(2L % 3L, exec("return 2L%3L;")); + assertEquals(5L % 10L, exec("return 5L%10L;")); + assertEquals(10L % 1L % 2L, exec("return 10L%1L%2L;")); + assertEquals((10L % 1L) % 2L, exec("return (10L%1L)%2L;")); + assertEquals(10L % (4L % 3L), exec("return 10L%(4L%3L);")); + assertEquals(10L % 1L, exec("return 10L%1L;")); + assertEquals(0L % 1L, exec("return 0L%1L;")); } public void testFloat() throws Exception { - assertEquals(1F%1F, exec("float x = 1; float y = 1; return x%y;")); - assertEquals(2F%3F, exec("float x = 2; float y = 3; return x%y;")); - assertEquals(5F%10F, exec("float x = 5; float y = 10; return x%y;")); - assertEquals(10F%1F%2F, exec("float x = 10; float y = 1; float z = 2; return x%y%z;")); - assertEquals((10F%1F)%2F, exec("float x = 10; float y = 1; float z = 2; return (x%y)%z;")); - assertEquals(10F%(4F%3F), exec("float x = 10; float y = 4; float z = 3; return x%(y%z);")); - assertEquals(10F%1F, exec("float x = 10; float y = 1; return x%y;")); - assertEquals(0F%1F, exec("float x = 0; float y = 1; return x%y;")); + assertEquals(1F % 1F, exec("float x = 1; float y = 1; return x%y;")); + assertEquals(2F % 3F, exec("float x = 2; float y = 3; return x%y;")); + assertEquals(5F % 10F, exec("float x = 5; float y = 10; return x%y;")); + assertEquals(10F % 1F % 2F, exec("float x = 10; float y = 1; float z = 2; return x%y%z;")); + assertEquals((10F % 1F) % 2F, exec("float x = 10; float y = 1; float z = 2; return (x%y)%z;")); + assertEquals(10F 
% (4F % 3F), exec("float x = 10; float y = 4; float z = 3; return x%(y%z);")); + assertEquals(10F % 1F, exec("float x = 10; float y = 1; return x%y;")); + assertEquals(0F % 1F, exec("float x = 0; float y = 1; return x%y;")); } public void testFloatConst() throws Exception { - assertEquals(1F%1F, exec("return 1F%1F;")); - assertEquals(2F%3F, exec("return 2F%3F;")); - assertEquals(5F%10F, exec("return 5F%10F;")); - assertEquals(10F%1F%2F, exec("return 10F%1F%2F;")); - assertEquals((10F%1F)%2F, exec("return (10F%1F)%2F;")); - assertEquals(10F%(4F%3F), exec("return 10F%(4F%3F);")); - assertEquals(10F%1F, exec("return 10F%1F;")); - assertEquals(0F%1F, exec("return 0F%1F;")); + assertEquals(1F % 1F, exec("return 1F%1F;")); + assertEquals(2F % 3F, exec("return 2F%3F;")); + assertEquals(5F % 10F, exec("return 5F%10F;")); + assertEquals(10F % 1F % 2F, exec("return 10F%1F%2F;")); + assertEquals((10F % 1F) % 2F, exec("return (10F%1F)%2F;")); + assertEquals(10F % (4F % 3F), exec("return 10F%(4F%3F);")); + assertEquals(10F % 1F, exec("return 10F%1F;")); + assertEquals(0F % 1F, exec("return 0F%1F;")); } public void testDouble() throws Exception { - assertEquals(1.0%1.0, exec("double x = 1; double y = 1; return x%y;")); - assertEquals(2.0%3.0, exec("double x = 2; double y = 3; return x%y;")); - assertEquals(5.0%10.0, exec("double x = 5; double y = 10; return x%y;")); - assertEquals(10.0%1.0%2.0, exec("double x = 10; double y = 1; double z = 2; return x%y%z;")); - assertEquals((10.0%1.0)%2.0, exec("double x = 10; double y = 1; double z = 2; return (x%y)%z;")); - assertEquals(10.0%(4.0%3.0), exec("double x = 10; double y = 4; double z = 3; return x%(y%z);")); - assertEquals(10.0%1.0, exec("double x = 10; double y = 1; return x%y;")); - assertEquals(0.0%1.0, exec("double x = 0; double y = 1; return x%y;")); + assertEquals(1.0 % 1.0, exec("double x = 1; double y = 1; return x%y;")); + assertEquals(2.0 % 3.0, exec("double x = 2; double y = 3; return x%y;")); + assertEquals(5.0 % 10.0, exec("double x = 5; double y = 10; return x%y;")); + assertEquals(10.0 % 1.0 % 2.0, exec("double x = 10; double y = 1; double z = 2; return x%y%z;")); + assertEquals((10.0 % 1.0) % 2.0, exec("double x = 10; double y = 1; double z = 2; return (x%y)%z;")); + assertEquals(10.0 % (4.0 % 3.0), exec("double x = 10; double y = 4; double z = 3; return x%(y%z);")); + assertEquals(10.0 % 1.0, exec("double x = 10; double y = 1; return x%y;")); + assertEquals(0.0 % 1.0, exec("double x = 0; double y = 1; return x%y;")); } public void testDoubleConst() throws Exception { - assertEquals(1.0%1.0, exec("return 1.0%1.0;")); - assertEquals(2.0%3.0, exec("return 2.0%3.0;")); - assertEquals(5.0%10.0, exec("return 5.0%10.0;")); - assertEquals(10.0%1.0%2.0, exec("return 10.0%1.0%2.0;")); - assertEquals((10.0%1.0)%2.0, exec("return (10.0%1.0)%2.0;")); - assertEquals(10.0%(4.0%3.0), exec("return 10.0%(4.0%3.0);")); - assertEquals(10.0%1.0, exec("return 10.0%1.0;")); - assertEquals(0.0%1.0, exec("return 0.0%1.0;")); + assertEquals(1.0 % 1.0, exec("return 1.0%1.0;")); + assertEquals(2.0 % 3.0, exec("return 2.0%3.0;")); + assertEquals(5.0 % 10.0, exec("return 5.0%10.0;")); + assertEquals(10.0 % 1.0 % 2.0, exec("return 10.0%1.0%2.0;")); + assertEquals((10.0 % 1.0) % 2.0, exec("return (10.0%1.0)%2.0;")); + assertEquals(10.0 % (4.0 % 3.0), exec("return 10.0%(4.0%3.0);")); + assertEquals(10.0 % 1.0, exec("return 10.0%1.0;")); + assertEquals(0.0 % 1.0, exec("return 0.0%1.0;")); } public void testDivideByZero() throws Exception { - 
expectScriptThrows(ArithmeticException.class, () -> { - exec("int x = 1; int y = 0; return x % y;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("int x = 1; int y = 0; return x % y;"); }); - expectScriptThrows(ArithmeticException.class, () -> { - exec("long x = 1L; long y = 0L; return x % y;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("long x = 1L; long y = 0L; return x % y;"); }); } public void testDivideByZeroConst() throws Exception { - expectScriptThrows(ArithmeticException.class, () -> { - exec("return 1%0;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("return 1%0;"); }); - expectScriptThrows(ArithmeticException.class, () -> { - exec("return 1L%0L;"); - }); + expectScriptThrows(ArithmeticException.class, () -> { exec("return 1L%0L;"); }); } public void testDef() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptEngineTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptEngineTests.java index 0f4d18855ae..bb011aca8d5 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptEngineTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptEngineTests.java @@ -40,7 +40,7 @@ public class ScriptEngineTests extends ScriptTestCase { public void testSimpleEquation() { final Object value = exec("return 1 + 2;"); - assertEquals(3, ((Number)value).intValue()); + assertEquals(3, ((Number) value).intValue()); } @SuppressWarnings("unchecked") // We know its Map because we put them there in the test @@ -55,7 +55,7 @@ public class ScriptEngineTests extends ScriptTestCase { vars.put("obj1", obj1); Object value = exec("return params['obj1'];", vars, true); - obj1 = (Map)value; + obj1 = (Map) value; assertEquals("value1", obj1.get("prop1")); assertEquals("value2", ((Map) obj1.get("obj2")).get("prop2")); @@ -77,9 +77,9 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals("1", exec("return params.l.0;", vars, true)); Object value = exec("return params.l.3;", vars, true); - obj1 = (Map)value; + obj1 = (Map) value; assertEquals("value1", obj1.get("prop1")); - assertEquals("value2", ((Map)obj1.get("obj2")).get("prop2")); + assertEquals("value2", ((Map) obj1.get("obj2")).get("prop2")); assertEquals("value1", exec("return params.l.3.prop1;", vars, true)); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java index 2726bdcd153..488c01c6d1a 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptTestCase.java @@ -83,7 +83,7 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { */ public static final void assertBytecodeExists(String script, String bytecode) { final String asm = Debugger.toString(script); - assertTrue("bytecode not found, got: \n" + asm , asm.contains(bytecode)); + assertTrue("bytecode not found, got: \n" + asm, asm.contains(bytecode)); } /** @@ -92,7 +92,7 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { */ public static final void assertBytecodeHasPattern(String script, String pattern) { final String asm = Debugger.toString(script); - assertTrue("bytecode not found, got: \n" + asm , asm.matches(pattern)); + assertTrue("bytecode not found, got: \n" + asm, asm.matches(pattern)); } /** Checks a specific exception class is thrown (boxed inside 
ScriptException) and returns it. */ @@ -101,8 +101,11 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { } /** Checks a specific exception class is thrown (boxed inside ScriptException) and returns it. */ - public static final T expectScriptThrows(Class expectedType, boolean shouldHaveScriptStack, - ThrowingRunnable runnable) { + public static final T expectScriptThrows( + Class expectedType, + boolean shouldHaveScriptStack, + ThrowingRunnable runnable + ) { try { runnable.run(); } catch (Throwable e) { @@ -130,8 +133,9 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { assertion.initCause(e); throw assertion; } - AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " - + expectedType.getSimpleName()); + AssertionFailedError assertion = new AssertionFailedError( + "Unexpected exception type, expected " + expectedType.getSimpleName() + ); assertion.initCause(e); throw assertion; } @@ -166,13 +170,13 @@ public abstract class ScriptTestCase extends OpenSearchTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public final Object exec(String script, Map vars, boolean picky) { - Map compilerSettings = new HashMap<>(); + Map compilerSettings = new HashMap<>(); compilerSettings.put(CompilerSettings.INITIAL_CALL_SITE_DEPTH, random().nextBoolean() ? "0" : "10"); return exec(script, vars, compilerSettings, picky); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ - public final Object exec(String script, Map vars, Map compileParams, boolean picky) { + public final Object exec(String script, Map vars, Map compileParams, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { CompilerSettings pickySettings = new CompilerSettings(); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptedMetricAggContextsTests.java index bfc9e9fa8ca..af28a3f850d 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ScriptedMetricAggContextsTests.java @@ -79,8 +79,12 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { } public void testInitBasic() { - ScriptedMetricAggContexts.InitScript.Factory factory = getEngine().compile("test", - "state.testField = params.initialVal", ScriptedMetricAggContexts.InitScript.CONTEXT, Collections.emptyMap()); + ScriptedMetricAggContexts.InitScript.Factory factory = getEngine().compile( + "test", + "state.testField = params.initialVal", + ScriptedMetricAggContexts.InitScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); Map state = new HashMap<>(); @@ -90,23 +94,31 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { ScriptedMetricAggContexts.InitScript script = factory.newInstance(params, state); script.execute(); - assert(state.containsKey("testField")); + assert (state.containsKey("testField")); assertEquals(10, state.get("testField")); } public void testMapBasic() throws IOException { - ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile("test", - "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile( + 
"test", + "state.testField = 2*_score", + ScriptedMetricAggContexts.MapScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); Map state = new HashMap<>(); Scorable scorer = new Scorable() { @Override - public int docID() { return 0; } + public int docID() { + return 0; + } @Override - public float score() { return 0.5f; } + public float score() { + return 0.5f; + } }; ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); @@ -115,13 +127,17 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { script.setScorer(scorer); script.execute(); - assert(state.containsKey("testField")); + assert (state.containsKey("testField")); assertEquals(1.0, state.get("testField")); } public void testReturnSource() throws IOException { - ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile("test", - "state._source = params._source", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile( + "test", + "state._source = params._source", + ScriptedMetricAggContexts.MapScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); Map state = new HashMap<>(); @@ -142,13 +158,17 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { script.execute(); assertTrue(state.containsKey("_source")); - assertTrue(state.get("_source") instanceof Map && ((Map)state.get("_source")).containsKey("test")); - assertEquals(1, ((Map)state.get("_source")).get("test")); + assertTrue(state.get("_source") instanceof Map && ((Map) state.get("_source")).containsKey("test")); + assertEquals(1, ((Map) state.get("_source")).get("test")); } public void testMapSourceAccess() throws IOException { - ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile("test", - "state.testField = params._source.three", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + ScriptedMetricAggContexts.MapScript.Factory factory = getEngine().compile( + "test", + "state.testField = params._source.three", + ScriptedMetricAggContexts.MapScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); Map state = new HashMap<>(); @@ -173,9 +193,12 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { } public void testCombineBasic() { - ScriptedMetricAggContexts.CombineScript.Factory factory = getEngine().compile("test", - "state.testField = params.initialVal; return state.testField + params.inc", ScriptedMetricAggContexts.CombineScript.CONTEXT, - Collections.emptyMap()); + ScriptedMetricAggContexts.CombineScript.Factory factory = getEngine().compile( + "test", + "state.testField = params.initialVal; return state.testField + params.inc", + ScriptedMetricAggContexts.CombineScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); Map state = new HashMap<>(); @@ -186,14 +209,18 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { ScriptedMetricAggContexts.CombineScript script = factory.newInstance(params, state); Object res = script.execute(); - assert(state.containsKey("testField")); + assert (state.containsKey("testField")); assertEquals(10, state.get("testField")); assertEquals(12, res); } public void testReduceBasic() { - ScriptedMetricAggContexts.ReduceScript.Factory factory = getEngine().compile("test", - "states[0].testField + states[1].testField", ScriptedMetricAggContexts.ReduceScript.CONTEXT, Collections.emptyMap()); + 
ScriptedMetricAggContexts.ReduceScript.Factory factory = getEngine().compile( + "test", + "states[0].testField + states[1].testField", + ScriptedMetricAggContexts.ReduceScript.CONTEXT, + Collections.emptyMap() + ); Map params = new HashMap<>(); List states = new ArrayList<>(); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ShiftTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ShiftTests.java index ea773cc391a..825bc5137c3 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ShiftTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ShiftTests.java @@ -63,33 +63,17 @@ public class ShiftTests extends ScriptTestCase { } public void testBogusShifts() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("long x = 1L; float y = 2; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1; double y = 2L; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; int y = 2; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; int y = 2L; return x << y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 1L; float y = 2; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1; double y = 2L; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; int y = 2; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; int y = 2L; return x << y;"); }); } public void testBogusShiftsConst() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("return 1L << 2F;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("return 1L << 2.0;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("return 1F << 2;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("return 1D << 2L"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("return 1L << 2F;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("return 1L << 2.0;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("return 1F << 2;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("return 1D << 2L"); }); } public void testLshDef() { @@ -435,126 +419,54 @@ public class ShiftTests extends ScriptTestCase { } public void testBogusDefShifts() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; def y = 2F; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; def y = 2D; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; def y = 2; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; def y = 2L; return x << y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; def y = 2F; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; def y = 2D; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; def y = 2; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; def y = 2L; return x << y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; def y = 2F; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; def y 
= 2D; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; def y = 2; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; def y = 2L; return x >> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; def y = 2F; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; def y = 2D; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; def y = 2; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; def y = 2L; return x >> y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; def y = 2F; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; def y = 2D; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; def y = 2; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; def y = 2L; return x >>> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; def y = 2F; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; def y = 2D; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; def y = 2; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; def y = 2L; return x >>> y;"); }); } public void testBogusDefShiftsTypedLHS() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("long x = 1L; def y = 2F; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1; def y = 2D; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; def y = 2; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; def y = 2L; return x << y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 1L; def y = 2F; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1; def y = 2D; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; def y = 2; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; def y = 2L; return x << y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("long x = 1L; def y = 2F; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1; def y = 2D; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; def y = 2; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; def y = 2L; return x >> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 1L; def y = 2F; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1; def y = 2D; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; def y = 2; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; def y = 2L; return x >> y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("long x = 1L; def y = 2F; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1; def y = 2D; return x >>> 
y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; def y = 2; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; def y = 2L; return x >>> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 1L; def y = 2F; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1; def y = 2D; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; def y = 2; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; def y = 2L; return x >>> y;"); }); } public void testBogusDefShiftsTypedRHS() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; float y = 2F; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; double y = 2D; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; int y = 2; return x << y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; long y = 2L; return x << y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; float y = 2F; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; double y = 2D; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; int y = 2; return x << y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; long y = 2L; return x << y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; float y = 2F; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; double y = 2D; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; int y = 2; return x >> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; long y = 2L; return x >> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; float y = 2F; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; double y = 2D; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; int y = 2; return x >> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; long y = 2L; return x >> y;"); }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; float y = 2F; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; double y = 2D; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1F; int y = 2; return x >>> y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1D; long y = 2L; return x >>> y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; float y = 2F; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; double y = 2D; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1F; int y = 2; return x >>> y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1D; long y = 2L; return x >>> y;"); }); } public void testLshCompoundAssignment() { @@ -618,47 +530,23 @@ public class ShiftTests extends ScriptTestCase { } public void testBogusCompoundAssignment() { - 
expectScriptThrows(ClassCastException.class, ()-> { - exec("long x = 1L; float y = 2; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1; double y = 2L; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; int y = 2; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; int y = 2L; x <<= y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("long x = 1L; float y = 2; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1; double y = 2L; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; int y = 2; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; int y = 2L; x <<= y;"); }); } public void testBogusCompoundAssignmentConst() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1L; x <<= 2F;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("int x = 1L; x <<= 2.0;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; x <<= 2;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; x <<= 2L;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1L; x <<= 2F;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 1L; x <<= 2.0;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; x <<= 2;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; x <<= 2L;"); }); } public void testBogusCompoundAssignmentDef() { - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1L; float y = 2; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("def x = 1; double y = 2L; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("float x = 1F; def y = 2; x <<= y;"); - }); - expectScriptThrows(ClassCastException.class, ()-> { - exec("double x = 1D; def y = 2L; x <<= y;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1L; float y = 2; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 1; double y = 2L; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 1F; def y = 2; x <<= y;"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 1D; def y = 2L; x <<= y;"); }); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/SimilarityScriptTests.java index d8bdc478fcb..9b404853cf7 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/SimilarityScriptTests.java @@ -87,7 +87,11 @@ public class SimilarityScriptTests extends ScriptTestCase { public void testBasics() throws IOException { SimilarityScript.Factory factory = getEngine().compile( - "foobar", "return query.boost * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap()); + "foobar", + "return query.boost * doc.freq / doc.length", + SimilarityScript.CONTEXT, + Collections.emptyMap() + ); ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", factory::newInstance, true); Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, 
newIndexWriterConfig().setSimilarity(sim)); @@ -111,10 +115,12 @@ public class SimilarityScriptTests extends ScriptTestCase { w.close(); IndexSearcher searcher = new IndexSearcher(r); searcher.setSimilarity(sim); - Query query = new BoostQuery(new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "foo")), Occur.SHOULD) + Query query = new BoostQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("f", "foo")), Occur.SHOULD) .add(new TermQuery(new Term("match", "yes")), Occur.FILTER) - .build(), 3.2f); + .build(), + 3.2f + ); TopDocs topDocs = searcher.search(query, 1); assertEquals(1, topDocs.totalHits.value); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); @@ -124,9 +130,17 @@ public class SimilarityScriptTests extends ScriptTestCase { public void testWeightScript() throws IOException { SimilarityWeightScript.Factory weightFactory = getEngine().compile( - "foobar", "return query.boost", SimilarityWeightScript.CONTEXT, Collections.emptyMap()); + "foobar", + "return query.boost", + SimilarityWeightScript.CONTEXT, + Collections.emptyMap() + ); SimilarityScript.Factory factory = getEngine().compile( - "foobar", "return weight * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap()); + "foobar", + "return weight * doc.freq / doc.length", + SimilarityScript.CONTEXT, + Collections.emptyMap() + ); ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightFactory::newInstance, "foobaz", factory::newInstance, true); Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); @@ -150,10 +164,12 @@ public class SimilarityScriptTests extends ScriptTestCase { w.close(); IndexSearcher searcher = new IndexSearcher(r); searcher.setSimilarity(sim); - Query query = new BoostQuery(new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "foo")), Occur.SHOULD) + Query query = new BoostQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("f", "foo")), Occur.SHOULD) .add(new TermQuery(new Term("match", "yes")), Occur.FILTER) - .build(), 3.2f); + .build(), + 3.2f + ); TopDocs topDocs = searcher.search(query, 1); assertEquals(1, topDocs.totalHits.value); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/StringTests.java index 840559fc4ab..d344a9c2a31 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/StringTests.java @@ -47,12 +47,12 @@ public class StringTests extends ScriptTestCase { // boolean assertEquals("cat" + true, exec("String s = \"cat\"; return s + true;")); // byte - assertEquals("cat" + (byte)3, exec("String s = \"cat\"; return s + (byte)3;")); + assertEquals("cat" + (byte) 3, exec("String s = \"cat\"; return s + (byte)3;")); // short - assertEquals("cat" + (short)3, exec("String s = \"cat\"; return s + (short)3;")); + assertEquals("cat" + (short) 3, exec("String s = \"cat\"; return s + (short)3;")); // char assertEquals("cat" + 't', exec("String s = \"cat\"; return s + 't';")); - assertEquals("cat" + (char)40, exec("String s = \"cat\"; return s + (char)40;")); + assertEquals("cat" + (char) 40, exec("String s = \"cat\"; return s + (char)40;")); // int assertEquals("cat" + 2, exec("String s = \"cat\"; return s + 2;")); // long @@ -67,12 +67,12 @@ public class StringTests extends 
ScriptTestCase { // boolean assertEquals("cat" + true, exec("String s = 'cat'; return s + true;")); // byte - assertEquals("cat" + (byte)3, exec("String s = 'cat'; return s + (byte)3;")); + assertEquals("cat" + (byte) 3, exec("String s = 'cat'; return s + (byte)3;")); // short - assertEquals("cat" + (short)3, exec("String s = 'cat'; return s + (short)3;")); + assertEquals("cat" + (short) 3, exec("String s = 'cat'; return s + (short)3;")); // char assertEquals("cat" + 't', exec("String s = 'cat'; return s + 't';")); - assertEquals("cat" + (char)40, exec("String s = 'cat'; return s + (char)40;")); + assertEquals("cat" + (char) 40, exec("String s = 'cat'; return s + (char)40;")); // int assertEquals("cat" + 2, exec("String s = 'cat'; return s + 2;")); // long @@ -104,8 +104,10 @@ public class StringTests extends ScriptTestCase { result.append(s); } final String s = script.toString(); - assertTrue("every string part should be separately pushed to stack.", - Debugger.toString(s).contains(String.format(Locale.ROOT, "LDC \"%03d\"", count/2))); + assertTrue( + "every string part should be separately pushed to stack.", + Debugger.toString(s).contains(String.format(Locale.ROOT, "LDC \"%03d\"", count / 2)) + ); assertEquals(result.toString(), exec(s)); } @@ -126,7 +128,7 @@ public class StringTests extends ScriptTestCase { assertEquals("cdcde", exec("String t = \"abcde\"; return t.replace(\"ab\", \"cd\");")); assertEquals(false, exec("String s = \"xy\"; return s.startsWith(\"y\");")); assertEquals("e", exec("String t = \"abcde\"; return t.substring(4, 5);")); - assertEquals(97, ((char[])exec("String s = \"a\"; return s.toCharArray();"))[0]); + assertEquals(97, ((char[]) exec("String s = \"a\"; return s.toCharArray();"))[0]); assertEquals("a", exec("String s = \" a \"; return s.trim();")); assertEquals('x', exec("return \"x\".charAt(0);")); assertEquals(120, exec("return \"x\".codePointAt(0);")); @@ -139,7 +141,7 @@ public class StringTests extends ScriptTestCase { assertEquals("cdcde", exec("return \"abcde\".replace(\"ab\", \"cd\");")); assertEquals(false, exec("return \"xy\".startsWith(\"y\");")); assertEquals("e", exec("return \"abcde\".substring(4, 5);")); - assertEquals(97, ((char[])exec("return \"a\".toCharArray();"))[0]); + assertEquals(97, ((char[]) exec("return \"a\".toCharArray();"))[0]); assertEquals("a", exec("return \" a \".trim();")); assertEquals("", exec("return new String();")); @@ -154,7 +156,7 @@ public class StringTests extends ScriptTestCase { assertEquals("cdcde", exec("String t = 'abcde'; return t.replace('ab', 'cd');")); assertEquals(false, exec("String s = 'xy'; return s.startsWith('y');")); assertEquals("e", exec("String t = 'abcde'; return t.substring(4, 5);")); - assertEquals(97, ((char[])exec("String s = 'a'; return s.toCharArray();"))[0]); + assertEquals(97, ((char[]) exec("String s = 'a'; return s.toCharArray();"))[0]); assertEquals("a", exec("String s = ' a '; return s.trim();")); assertEquals('x', exec("return 'x'.charAt(0);")); assertEquals(120, exec("return 'x'.codePointAt(0);")); @@ -167,7 +169,7 @@ public class StringTests extends ScriptTestCase { assertEquals("cdcde", exec("return 'abcde'.replace('ab', 'cd');")); assertEquals(false, exec("return 'xy'.startsWith('y');")); assertEquals("e", exec("return 'abcde'.substring(4, 5);")); - assertEquals(97, ((char[])exec("return 'a'.toCharArray();"))[0]); + assertEquals(97, ((char[]) exec("return 'a'.toCharArray();"))[0]); assertEquals("a", exec("return ' a '.trim();")); } @@ -180,54 +182,48 @@ public class StringTests 
extends ScriptTestCase { assertEquals('c', exec("String s = \"c\"; (char)s")); assertEquals('c', exec("String s = 'c'; (char)s")); - ClassCastException expected = expectScriptThrows(ClassCastException.class, false, () -> { - assertEquals("cc", exec("return (String)(char)\"cc\"")); - }); + ClassCastException expected = expectScriptThrows( + ClassCastException.class, + false, + () -> { assertEquals("cc", exec("return (String)(char)\"cc\"")); } + ); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); - expected = expectScriptThrows(ClassCastException.class, false, () -> { - assertEquals("cc", exec("return (String)(char)'cc'")); - }); + expected = expectScriptThrows(ClassCastException.class, false, () -> { assertEquals("cc", exec("return (String)(char)'cc'")); }); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); - expected = expectScriptThrows(ClassCastException.class, () -> { - assertEquals('c', exec("String s = \"cc\"; (char)s")); - }); + expected = expectScriptThrows(ClassCastException.class, () -> { assertEquals('c', exec("String s = \"cc\"; (char)s")); }); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); - expected = expectScriptThrows(ClassCastException.class, () -> { - assertEquals('c', exec("String s = 'cc'; (char)s")); - }); + expected = expectScriptThrows(ClassCastException.class, () -> { assertEquals('c', exec("String s = 'cc'; (char)s")); }); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); } public void testDefConcat() { - assertEquals("a" + (byte)2, exec("def x = 'a'; def y = (byte)2; return x + y")); - assertEquals("a" + (short)2, exec("def x = 'a'; def y = (short)2; return x + y")); - assertEquals("a" + (char)2, exec("def x = 'a'; def y = (char)2; return x + y")); + assertEquals("a" + (byte) 2, exec("def x = 'a'; def y = (byte)2; return x + y")); + assertEquals("a" + (short) 2, exec("def x = 'a'; def y = (short)2; return x + y")); + assertEquals("a" + (char) 2, exec("def x = 'a'; def y = (char)2; return x + y")); assertEquals("a" + 2, exec("def x = 'a'; def y = (int)2; return x + y")); assertEquals("a" + 2L, exec("def x = 'a'; def y = (long)2; return x + y")); assertEquals("a" + 2F, exec("def x = 'a'; def y = (float)2; return x + y")); assertEquals("a" + 2D, exec("def x = 'a'; def y = (double)2; return x + y")); assertEquals("ab", exec("def x = 'a'; def y = 'b'; return x + y")); - assertEquals((byte)2 + "a", exec("def x = 'a'; def y = (byte)2; return y + x")); - assertEquals((short)2 + "a", exec("def x = 'a'; def y = (short)2; return y + x")); - assertEquals((char)2 + "a", exec("def x = 'a'; def y = (char)2; return y + x")); + assertEquals((byte) 2 + "a", exec("def x = 'a'; def y = (byte)2; return y + x")); + assertEquals((short) 2 + "a", exec("def x = 'a'; def y = (short)2; return y + x")); + assertEquals((char) 2 + "a", exec("def x = 'a'; def y = (char)2; return y + x")); assertEquals(2 + "a", exec("def x = 'a'; def y = (int)2; return y + x")); assertEquals(2L + "a", exec("def x = 'a'; def y = (long)2; return y + x")); assertEquals(2F + "a", exec("def x = 'a'; def y = (float)2; return y + x")); assertEquals(2D + "a", exec("def x = 'a'; def y = (double)2; return y + x")); assertEquals("anull", exec("def x = 'a'; def y = null; return x + y")); assertEquals("nullb", exec("def x = null; def y = 'b'; return x + y")); - 
expectScriptThrows(NullPointerException.class, () -> { - exec("def x = null; def y = null; return x + y"); - }); + expectScriptThrows(NullPointerException.class, () -> { exec("def x = null; def y = null; return x + y"); }); } public void testDefCompoundAssignment() { - assertEquals("a" + (byte)2, exec("def x = 'a'; x += (byte)2; return x")); - assertEquals("a" + (short)2, exec("def x = 'a'; x += (short)2; return x")); - assertEquals("a" + (char)2, exec("def x = 'a'; x += (char)2; return x")); + assertEquals("a" + (byte) 2, exec("def x = 'a'; x += (byte)2; return x")); + assertEquals("a" + (short) 2, exec("def x = 'a'; x += (short)2; return x")); + assertEquals("a" + (char) 2, exec("def x = 'a'; x += (char)2; return x")); assertEquals("a" + 2, exec("def x = 'a'; x += (int)2; return x")); assertEquals("a" + 2L, exec("def x = 'a'; x += (long)2; return x")); assertEquals("a" + 2F, exec("def x = 'a'; x += (float)2; return x")); @@ -235,9 +231,7 @@ public class StringTests extends ScriptTestCase { assertEquals("ab", exec("def x = 'a'; def y = 'b'; x += y; return x")); assertEquals("anull", exec("def x = 'a'; x += null; return x")); assertEquals("nullb", exec("def x = null; x += 'b'; return x")); - expectScriptThrows(NullPointerException.class, () -> { - exec("def x = null; def y = null; x += y"); - }); + expectScriptThrows(NullPointerException.class, () -> { exec("def x = null; def y = null; x += y"); }); } public void testComplexCompoundAssignment() { @@ -268,15 +262,19 @@ public class StringTests extends ScriptTestCase { public void testJava9ConstantStringConcatBytecode() { assumeTrue("Needs Java 9 to test indified String concat", Constants.JRE_IS_MINIMUM_JAVA9); assertNotNull(WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE); - assertBytecodeExists("String s = \"cat\"; return s + true + 'abc' + null;", - "INVOKEDYNAMIC concat(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;"); + assertBytecodeExists( + "String s = \"cat\"; return s + true + 'abc' + null;", + "INVOKEDYNAMIC concat(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;" + ); } public void testJava9StringConcatBytecode() { assumeTrue("Needs Java 9 to test indified String concat", Constants.JRE_IS_MINIMUM_JAVA9); assertNotNull(WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE); - assertBytecodeExists("String s = \"cat\"; boolean t = true; Object u = null; return s + t + 'abc' + u;", - "INVOKEDYNAMIC concat(Ljava/lang/String;ZLjava/lang/String;Ljava/lang/Object;)Ljava/lang/String;"); + assertBytecodeExists( + "String s = \"cat\"; boolean t = true; Object u = null; return s + t + 'abc' + u;", + "INVOKEDYNAMIC concat(Ljava/lang/String;ZLjava/lang/String;Ljava/lang/Object;)Ljava/lang/String;" + ); } public void testNullStringConcat() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/SubtractionTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/SubtractionTests.java index 6db580061d2..5df34a1f754 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/SubtractionTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/SubtractionTests.java @@ -33,7 +33,7 @@ package org.opensearch.painless; /** Tests for subtraction operator across all types */ -//TODO: NaN/Inf/overflow/... +// TODO: NaN/Inf/overflow/... 
public class SubtractionTests extends ScriptTestCase { public void testBasics() throws Exception { @@ -42,157 +42,157 @@ public class SubtractionTests extends ScriptTestCase { } public void testInt() throws Exception { - assertEquals(1-1, exec("int x = 1; int y = 1; return x-y;")); - assertEquals(2-3, exec("int x = 2; int y = 3; return x-y;")); - assertEquals(5-10, exec("int x = 5; int y = 10; return x-y;")); - assertEquals(1-1-2, exec("int x = 1; int y = 1; int z = 2; return x-y-z;")); - assertEquals((1-1)-2, exec("int x = 1; int y = 1; int z = 2; return (x-y)-z;")); - assertEquals(1-(1-2), exec("int x = 1; int y = 1; int z = 2; return x-(y-z);")); - assertEquals(10-0, exec("int x = 10; int y = 0; return x-y;")); - assertEquals(0-0, exec("int x = 0; int y = 0; return x-x;")); + assertEquals(1 - 1, exec("int x = 1; int y = 1; return x-y;")); + assertEquals(2 - 3, exec("int x = 2; int y = 3; return x-y;")); + assertEquals(5 - 10, exec("int x = 5; int y = 10; return x-y;")); + assertEquals(1 - 1 - 2, exec("int x = 1; int y = 1; int z = 2; return x-y-z;")); + assertEquals((1 - 1) - 2, exec("int x = 1; int y = 1; int z = 2; return (x-y)-z;")); + assertEquals(1 - (1 - 2), exec("int x = 1; int y = 1; int z = 2; return x-(y-z);")); + assertEquals(10 - 0, exec("int x = 10; int y = 0; return x-y;")); + assertEquals(0 - 0, exec("int x = 0; int y = 0; return x-x;")); } public void testIntConst() throws Exception { - assertEquals(1-1, exec("return 1-1;")); - assertEquals(2-3, exec("return 2-3;")); - assertEquals(5-10, exec("return 5-10;")); - assertEquals(1-1-2, exec("return 1-1-2;")); - assertEquals((1-1)-2, exec("return (1-1)-2;")); - assertEquals(1-(1-2), exec("return 1-(1-2);")); - assertEquals(10-0, exec("return 10-0;")); - assertEquals(0-0, exec("return 0-0;")); + assertEquals(1 - 1, exec("return 1-1;")); + assertEquals(2 - 3, exec("return 2-3;")); + assertEquals(5 - 10, exec("return 5-10;")); + assertEquals(1 - 1 - 2, exec("return 1-1-2;")); + assertEquals((1 - 1) - 2, exec("return (1-1)-2;")); + assertEquals(1 - (1 - 2), exec("return 1-(1-2);")); + assertEquals(10 - 0, exec("return 10-0;")); + assertEquals(0 - 0, exec("return 0-0;")); } public void testByte() throws Exception { - assertEquals((byte)1-(byte)1, exec("byte x = 1; byte y = 1; return x-y;")); - assertEquals((byte)2-(byte)3, exec("byte x = 2; byte y = 3; return x-y;")); - assertEquals((byte)5-(byte)10, exec("byte x = 5; byte y = 10; return x-y;")); - assertEquals((byte)1-(byte)1-(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x-y-z;")); - assertEquals(((byte)1-(byte)1)-(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x-y)-z;")); - assertEquals((byte)1-((byte)1-(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x-(y-z);")); - assertEquals((byte)10-(byte)1, exec("byte x = 10; byte y = 1; return x-y;")); - assertEquals((byte)0-(byte)0, exec("byte x = 0; byte y = 0; return x-y;")); + assertEquals((byte) 1 - (byte) 1, exec("byte x = 1; byte y = 1; return x-y;")); + assertEquals((byte) 2 - (byte) 3, exec("byte x = 2; byte y = 3; return x-y;")); + assertEquals((byte) 5 - (byte) 10, exec("byte x = 5; byte y = 10; return x-y;")); + assertEquals((byte) 1 - (byte) 1 - (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return x-y-z;")); + assertEquals(((byte) 1 - (byte) 1) - (byte) 2, exec("byte x = 1; byte y = 1; byte z = 2; return (x-y)-z;")); + assertEquals((byte) 1 - ((byte) 1 - (byte) 2), exec("byte x = 1; byte y = 1; byte z = 2; return x-(y-z);")); + assertEquals((byte) 10 - (byte) 1, exec("byte x = 10; byte y 
= 1; return x-y;")); + assertEquals((byte) 0 - (byte) 0, exec("byte x = 0; byte y = 0; return x-y;")); } public void testByteConst() throws Exception { - assertEquals((byte)1-(byte)1, exec("return (byte)1-(byte)1;")); - assertEquals((byte)2-(byte)3, exec("return (byte)2-(byte)3;")); - assertEquals((byte)5-(byte)10, exec("return (byte)5-(byte)10;")); - assertEquals((byte)1-(byte)1-(byte)2, exec("return (byte)1-(byte)1-(byte)2;")); - assertEquals(((byte)1-(byte)1)-(byte)2, exec("return ((byte)1-(byte)1)-(byte)2;")); - assertEquals((byte)1-((byte)1-(byte)2), exec("return (byte)1-((byte)1-(byte)2);")); - assertEquals((byte)10-(byte)1, exec("return (byte)10-(byte)1;")); - assertEquals((byte)0-(byte)0, exec("return (byte)0-(byte)0;")); + assertEquals((byte) 1 - (byte) 1, exec("return (byte)1-(byte)1;")); + assertEquals((byte) 2 - (byte) 3, exec("return (byte)2-(byte)3;")); + assertEquals((byte) 5 - (byte) 10, exec("return (byte)5-(byte)10;")); + assertEquals((byte) 1 - (byte) 1 - (byte) 2, exec("return (byte)1-(byte)1-(byte)2;")); + assertEquals(((byte) 1 - (byte) 1) - (byte) 2, exec("return ((byte)1-(byte)1)-(byte)2;")); + assertEquals((byte) 1 - ((byte) 1 - (byte) 2), exec("return (byte)1-((byte)1-(byte)2);")); + assertEquals((byte) 10 - (byte) 1, exec("return (byte)10-(byte)1;")); + assertEquals((byte) 0 - (byte) 0, exec("return (byte)0-(byte)0;")); } public void testChar() throws Exception { - assertEquals((char)1-(char)1, exec("char x = 1; char y = 1; return x-y;")); - assertEquals((char)2-(char)3, exec("char x = 2; char y = 3; return x-y;")); - assertEquals((char)5-(char)10, exec("char x = 5; char y = 10; return x-y;")); - assertEquals((char)1-(char)1-(char)2, exec("char x = 1; char y = 1; char z = 2; return x-y-z;")); - assertEquals(((char)1-(char)1)-(char)2, exec("char x = 1; char y = 1; char z = 2; return (x-y)-z;")); - assertEquals((char)1-((char)1-(char)2), exec("char x = 1; char y = 1; char z = 2; return x-(y-z);")); - assertEquals((char)10-(char)1, exec("char x = 10; char y = 1; return x-y;")); - assertEquals((char)0-(char)0, exec("char x = 0; char y = 0; return x-y;")); + assertEquals((char) 1 - (char) 1, exec("char x = 1; char y = 1; return x-y;")); + assertEquals((char) 2 - (char) 3, exec("char x = 2; char y = 3; return x-y;")); + assertEquals((char) 5 - (char) 10, exec("char x = 5; char y = 10; return x-y;")); + assertEquals((char) 1 - (char) 1 - (char) 2, exec("char x = 1; char y = 1; char z = 2; return x-y-z;")); + assertEquals(((char) 1 - (char) 1) - (char) 2, exec("char x = 1; char y = 1; char z = 2; return (x-y)-z;")); + assertEquals((char) 1 - ((char) 1 - (char) 2), exec("char x = 1; char y = 1; char z = 2; return x-(y-z);")); + assertEquals((char) 10 - (char) 1, exec("char x = 10; char y = 1; return x-y;")); + assertEquals((char) 0 - (char) 0, exec("char x = 0; char y = 0; return x-y;")); } public void testCharConst() throws Exception { - assertEquals((char)1-(char)1, exec("return (char)1-(char)1;")); - assertEquals((char)2-(char)3, exec("return (char)2-(char)3;")); - assertEquals((char)5-(char)10, exec("return (char)5-(char)10;")); - assertEquals((char)1-(char)1-(char)2, exec("return (char)1-(char)1-(char)2;")); - assertEquals(((char)1-(char)1)-(char)2, exec("return ((char)1-(char)1)-(char)2;")); - assertEquals((char)1-((char)1-(char)2), exec("return (char)1-((char)1-(char)2);")); - assertEquals((char)10-(char)1, exec("return (char)10-(char)1;")); - assertEquals((char)0-(char)0, exec("return (char)0-(char)0;")); + assertEquals((char) 1 - (char) 1, exec("return 
(char)1-(char)1;")); + assertEquals((char) 2 - (char) 3, exec("return (char)2-(char)3;")); + assertEquals((char) 5 - (char) 10, exec("return (char)5-(char)10;")); + assertEquals((char) 1 - (char) 1 - (char) 2, exec("return (char)1-(char)1-(char)2;")); + assertEquals(((char) 1 - (char) 1) - (char) 2, exec("return ((char)1-(char)1)-(char)2;")); + assertEquals((char) 1 - ((char) 1 - (char) 2), exec("return (char)1-((char)1-(char)2);")); + assertEquals((char) 10 - (char) 1, exec("return (char)10-(char)1;")); + assertEquals((char) 0 - (char) 0, exec("return (char)0-(char)0;")); } public void testShort() throws Exception { - assertEquals((short)1-(short)1, exec("short x = 1; short y = 1; return x-y;")); - assertEquals((short)2-(short)3, exec("short x = 2; short y = 3; return x-y;")); - assertEquals((short)5-(short)10, exec("short x = 5; short y = 10; return x-y;")); - assertEquals((short)1-(short)1-(short)2, exec("short x = 1; short y = 1; short z = 2; return x-y-z;")); - assertEquals(((short)1-(short)1)-(short)2, exec("short x = 1; short y = 1; short z = 2; return (x-y)-z;")); - assertEquals((short)1-((short)1-(short)2), exec("short x = 1; short y = 1; short z = 2; return x-(y-z);")); - assertEquals((short)10-(short)1, exec("short x = 10; short y = 1; return x-y;")); - assertEquals((short)0-(short)0, exec("short x = 0; short y = 0; return x-y;")); + assertEquals((short) 1 - (short) 1, exec("short x = 1; short y = 1; return x-y;")); + assertEquals((short) 2 - (short) 3, exec("short x = 2; short y = 3; return x-y;")); + assertEquals((short) 5 - (short) 10, exec("short x = 5; short y = 10; return x-y;")); + assertEquals((short) 1 - (short) 1 - (short) 2, exec("short x = 1; short y = 1; short z = 2; return x-y-z;")); + assertEquals(((short) 1 - (short) 1) - (short) 2, exec("short x = 1; short y = 1; short z = 2; return (x-y)-z;")); + assertEquals((short) 1 - ((short) 1 - (short) 2), exec("short x = 1; short y = 1; short z = 2; return x-(y-z);")); + assertEquals((short) 10 - (short) 1, exec("short x = 10; short y = 1; return x-y;")); + assertEquals((short) 0 - (short) 0, exec("short x = 0; short y = 0; return x-y;")); } public void testShortConst() throws Exception { - assertEquals((short)1-(short)1, exec("return (short)1-(short)1;")); - assertEquals((short)2-(short)3, exec("return (short)2-(short)3;")); - assertEquals((short)5-(short)10, exec("return (short)5-(short)10;")); - assertEquals((short)1-(short)1-(short)2, exec("return (short)1-(short)1-(short)2;")); - assertEquals(((short)1-(short)1)-(short)2, exec("return ((short)1-(short)1)-(short)2;")); - assertEquals((short)1-((short)1-(short)2), exec("return (short)1-((short)1-(short)2);")); - assertEquals((short)10-(short)1, exec("return (short)10-(short)1;")); - assertEquals((short)0-(short)0, exec("return (short)0-(short)0;")); + assertEquals((short) 1 - (short) 1, exec("return (short)1-(short)1;")); + assertEquals((short) 2 - (short) 3, exec("return (short)2-(short)3;")); + assertEquals((short) 5 - (short) 10, exec("return (short)5-(short)10;")); + assertEquals((short) 1 - (short) 1 - (short) 2, exec("return (short)1-(short)1-(short)2;")); + assertEquals(((short) 1 - (short) 1) - (short) 2, exec("return ((short)1-(short)1)-(short)2;")); + assertEquals((short) 1 - ((short) 1 - (short) 2), exec("return (short)1-((short)1-(short)2);")); + assertEquals((short) 10 - (short) 1, exec("return (short)10-(short)1;")); + assertEquals((short) 0 - (short) 0, exec("return (short)0-(short)0;")); } public void testLong() throws Exception { - assertEquals(1L-1L, 
exec("long x = 1; long y = 1; return x-y;")); - assertEquals(2L-3L, exec("long x = 2; long y = 3; return x-y;")); - assertEquals(5L-10L, exec("long x = 5; long y = 10; return x-y;")); - assertEquals(1L-1L-2L, exec("long x = 1; long y = 1; int z = 2; return x-y-z;")); - assertEquals((1L-1L)-2L, exec("long x = 1; long y = 1; int z = 2; return (x-y)-z;")); - assertEquals(1L-(1L-2L), exec("long x = 1; long y = 1; int z = 2; return x-(y-z);")); - assertEquals(10L-0L, exec("long x = 10; long y = 0; return x-y;")); - assertEquals(0L-0L, exec("long x = 0; long y = 0; return x-x;")); + assertEquals(1L - 1L, exec("long x = 1; long y = 1; return x-y;")); + assertEquals(2L - 3L, exec("long x = 2; long y = 3; return x-y;")); + assertEquals(5L - 10L, exec("long x = 5; long y = 10; return x-y;")); + assertEquals(1L - 1L - 2L, exec("long x = 1; long y = 1; int z = 2; return x-y-z;")); + assertEquals((1L - 1L) - 2L, exec("long x = 1; long y = 1; int z = 2; return (x-y)-z;")); + assertEquals(1L - (1L - 2L), exec("long x = 1; long y = 1; int z = 2; return x-(y-z);")); + assertEquals(10L - 0L, exec("long x = 10; long y = 0; return x-y;")); + assertEquals(0L - 0L, exec("long x = 0; long y = 0; return x-x;")); } public void testLongConst() throws Exception { - assertEquals(1L-1L, exec("return 1L-1L;")); - assertEquals(2L-3L, exec("return 2L-3L;")); - assertEquals(5L-10L, exec("return 5L-10L;")); - assertEquals(1L-1L-2L, exec("return 1L-1L-2L;")); - assertEquals((1L-1L)-2L, exec("return (1L-1L)-2L;")); - assertEquals(1L-(1L-2L), exec("return 1L-(1L-2L);")); - assertEquals(10L-0L, exec("return 10L-0L;")); - assertEquals(0L-0L, exec("return 0L-0L;")); + assertEquals(1L - 1L, exec("return 1L-1L;")); + assertEquals(2L - 3L, exec("return 2L-3L;")); + assertEquals(5L - 10L, exec("return 5L-10L;")); + assertEquals(1L - 1L - 2L, exec("return 1L-1L-2L;")); + assertEquals((1L - 1L) - 2L, exec("return (1L-1L)-2L;")); + assertEquals(1L - (1L - 2L), exec("return 1L-(1L-2L);")); + assertEquals(10L - 0L, exec("return 10L-0L;")); + assertEquals(0L - 0L, exec("return 0L-0L;")); } public void testFloat() throws Exception { - assertEquals(1F-1F, exec("float x = 1; float y = 1; return x-y;")); - assertEquals(2F-3F, exec("float x = 2; float y = 3; return x-y;")); - assertEquals(5F-10F, exec("float x = 5; float y = 10; return x-y;")); - assertEquals(1F-1F-2F, exec("float x = 1; float y = 1; float z = 2; return x-y-z;")); - assertEquals((1F-1F)-2F, exec("float x = 1; float y = 1; float z = 2; return (x-y)-z;")); - assertEquals(1F-(1F-2F), exec("float x = 1; float y = 1; float z = 2; return x-(y-z);")); - assertEquals(10F-0F, exec("float x = 10; float y = 0; return x-y;")); - assertEquals(0F-0F, exec("float x = 0; float y = 0; return x-x;")); + assertEquals(1F - 1F, exec("float x = 1; float y = 1; return x-y;")); + assertEquals(2F - 3F, exec("float x = 2; float y = 3; return x-y;")); + assertEquals(5F - 10F, exec("float x = 5; float y = 10; return x-y;")); + assertEquals(1F - 1F - 2F, exec("float x = 1; float y = 1; float z = 2; return x-y-z;")); + assertEquals((1F - 1F) - 2F, exec("float x = 1; float y = 1; float z = 2; return (x-y)-z;")); + assertEquals(1F - (1F - 2F), exec("float x = 1; float y = 1; float z = 2; return x-(y-z);")); + assertEquals(10F - 0F, exec("float x = 10; float y = 0; return x-y;")); + assertEquals(0F - 0F, exec("float x = 0; float y = 0; return x-x;")); } public void testFloatConst() throws Exception { - assertEquals(1F-1F, exec("return 1F-1F;")); - assertEquals(2F-3F, exec("return 2F-3F;")); - 
assertEquals(5F-10F, exec("return 5F-10F;")); - assertEquals(1F-1F-2F, exec("return 1F-1F-2F;")); - assertEquals((1F-1F)-2F, exec("return (1F-1F)-2F;")); - assertEquals(1F-(1F-2F), exec("return 1F-(1F-2F);")); - assertEquals(10F-0F, exec("return 10F-0F;")); - assertEquals(0F-0F, exec("return 0F-0F;")); + assertEquals(1F - 1F, exec("return 1F-1F;")); + assertEquals(2F - 3F, exec("return 2F-3F;")); + assertEquals(5F - 10F, exec("return 5F-10F;")); + assertEquals(1F - 1F - 2F, exec("return 1F-1F-2F;")); + assertEquals((1F - 1F) - 2F, exec("return (1F-1F)-2F;")); + assertEquals(1F - (1F - 2F), exec("return 1F-(1F-2F);")); + assertEquals(10F - 0F, exec("return 10F-0F;")); + assertEquals(0F - 0F, exec("return 0F-0F;")); } public void testDouble() throws Exception { - assertEquals(1D-1D, exec("double x = 1; double y = 1; return x-y;")); - assertEquals(2D-3D, exec("double x = 2; double y = 3; return x-y;")); - assertEquals(5D-10D, exec("double x = 5; double y = 10; return x-y;")); - assertEquals(1D-1D-2D, exec("double x = 1; double y = 1; double z = 2; return x-y-z;")); - assertEquals((1D-1D)-2D, exec("double x = 1; double y = 1; double z = 2; return (x-y)-z;")); - assertEquals(1D-(1D-2D), exec("double x = 1; double y = 1; double z = 2; return x-(y-z);")); - assertEquals(10D-0D, exec("double x = 10; float y = 0; return x-y;")); - assertEquals(0D-0D, exec("double x = 0; float y = 0; return x-x;")); + assertEquals(1D - 1D, exec("double x = 1; double y = 1; return x-y;")); + assertEquals(2D - 3D, exec("double x = 2; double y = 3; return x-y;")); + assertEquals(5D - 10D, exec("double x = 5; double y = 10; return x-y;")); + assertEquals(1D - 1D - 2D, exec("double x = 1; double y = 1; double z = 2; return x-y-z;")); + assertEquals((1D - 1D) - 2D, exec("double x = 1; double y = 1; double z = 2; return (x-y)-z;")); + assertEquals(1D - (1D - 2D), exec("double x = 1; double y = 1; double z = 2; return x-(y-z);")); + assertEquals(10D - 0D, exec("double x = 10; float y = 0; return x-y;")); + assertEquals(0D - 0D, exec("double x = 0; float y = 0; return x-x;")); } public void testyDoubleConst() throws Exception { - assertEquals(1.0-1.0, exec("return 1.0-1.0;")); - assertEquals(2.0-3.0, exec("return 2.0-3.0;")); - assertEquals(5.0-10.0, exec("return 5.0-10.0;")); - assertEquals(1.0-1.0-2.0, exec("return 1.0-1.0-2.0;")); - assertEquals((1.0-1.0)-2.0, exec("return (1.0-1.0)-2.0;")); - assertEquals(1.0-(1.0-2.0), exec("return 1.0-(1.0-2.0);")); - assertEquals(10.0-0.0, exec("return 10.0-0.0;")); - assertEquals(0.0-0.0, exec("return 0.0-0.0;")); + assertEquals(1.0 - 1.0, exec("return 1.0-1.0;")); + assertEquals(2.0 - 3.0, exec("return 2.0-3.0;")); + assertEquals(5.0 - 10.0, exec("return 5.0-10.0;")); + assertEquals(1.0 - 1.0 - 2.0, exec("return 1.0-1.0-2.0;")); + assertEquals((1.0 - 1.0) - 2.0, exec("return (1.0-1.0)-2.0;")); + assertEquals(1.0 - (1.0 - 2.0), exec("return 1.0-(1.0-2.0);")); + assertEquals(10.0 - 0.0, exec("return 10.0-0.0;")); + assertEquals(0.0 - 0.0, exec("return 0.0-0.0;")); } public void testDef() { diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/TryCatchTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/TryCatchTests.java index 118eb5059cd..272f349f5e2 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/TryCatchTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/TryCatchTests.java @@ -39,32 +39,44 @@ public class TryCatchTests extends ScriptTestCase { /** throws an exception */ public void testThrow() 
{ - RuntimeException exception = expectScriptThrows(RuntimeException.class, () -> { - exec("throw new RuntimeException('test')"); - }); + RuntimeException exception = expectScriptThrows(RuntimeException.class, () -> { exec("throw new RuntimeException('test')"); }); assertEquals("test", exception.getMessage()); } /** catches the exact exception */ public void testCatch() { - assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (RuntimeException e) { return 1; } return 2;", - Collections.singletonMap("param", "true"), true)); + assertEquals( + 1, + exec( + "try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (RuntimeException e) { return 1; } return 2;", + Collections.singletonMap("param", "true"), + true + ) + ); } /** catches superclass of the exception */ public void testCatchSuperclass() { - assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (Exception e) { return 1; } return 2;", - Collections.singletonMap("param", "true"), true)); + assertEquals( + 1, + exec( + "try { if (params.param == 'true') throw new RuntimeException('test'); } " + "catch (Exception e) { return 1; } return 2;", + Collections.singletonMap("param", "true"), + true + ) + ); } /** tries to catch a different type of exception */ public void testNoCatch() { RuntimeException exception = expectScriptThrows(RuntimeException.class, () -> { - exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + - "catch (ArithmeticException e) { return 1; } return 2;", - Collections.singletonMap("param", "true"), true); + exec( + "try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (ArithmeticException e) { return 1; } return 2;", + Collections.singletonMap("param", "true"), + true + ); }); assertEquals("test", exception.getMessage()); } @@ -72,43 +84,62 @@ public class TryCatchTests extends ScriptTestCase { public void testNoCatchBlock() { assertEquals(0, exec("try { return Integer.parseInt('f') } catch (NumberFormatException nfe) {} return 0;")); - assertEquals(0, exec("try { return Integer.parseInt('f') } " + - "catch (NumberFormatException nfe) {}" + - "catch (Exception e) {}" + - " return 0;")); + assertEquals( + 0, + exec("try { return Integer.parseInt('f') } " + "catch (NumberFormatException nfe) {}" + "catch (Exception e) {}" + " return 0;") + ); - assertEquals(0, exec("try { throw new IllegalArgumentException('test') } " + - "catch (NumberFormatException nfe) {}" + - "catch (Exception e) {}" + - " return 0;")); + assertEquals( + 0, + exec( + "try { throw new IllegalArgumentException('test') } " + + "catch (NumberFormatException nfe) {}" + + "catch (Exception e) {}" + + " return 0;" + ) + ); - assertEquals(0, exec("try { throw new IllegalArgumentException('test') } " + - "catch (NumberFormatException nfe) {}" + - "catch (IllegalArgumentException iae) {}" + - "catch (Exception e) {}" + - " return 0;")); + assertEquals( + 0, + exec( + "try { throw new IllegalArgumentException('test') } " + + "catch (NumberFormatException nfe) {}" + + "catch (IllegalArgumentException iae) {}" + + "catch (Exception e) {}" + + " return 0;" + ) + ); } public void testMultiCatch() { - assertEquals(1, exec( - "try { return Integer.parseInt('f') } " + - "catch (NumberFormatException nfe) {return 1;} " + - "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + - "catch (Exception e) {return 3;}" - )); + assertEquals( + 1, + exec( + "try { return 
Integer.parseInt('f') } " + + "catch (NumberFormatException nfe) {return 1;} " + + "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + + "catch (Exception e) {return 3;}" + ) + ); - assertEquals(2, exec( - "try { return new int[] {}[0] } " + - "catch (NumberFormatException nfe) {return 1;} " + - "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + - "catch (Exception e) {return 3;}" - )); + assertEquals( + 2, + exec( + "try { return new int[] {}[0] } " + + "catch (NumberFormatException nfe) {return 1;} " + + "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + + "catch (Exception e) {return 3;}" + ) + ); - assertEquals(3, exec( - "try { throw new IllegalArgumentException('test'); } " + - "catch (NumberFormatException nfe) {return 1;} " + - "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + - "catch (Exception e) {return 3;}" - )); + assertEquals( + 3, + exec( + "try { throw new IllegalArgumentException('test'); } " + + "catch (NumberFormatException nfe) {return 1;} " + + "catch (ArrayIndexOutOfBoundsException aioobe) {return 2;} " + + "catch (Exception e) {return 3;}" + ) + ); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/UnaryTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/UnaryTests.java index 0e85b0a5b81..6a0bce105bb 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/UnaryTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/UnaryTests.java @@ -75,9 +75,9 @@ public class UnaryTests extends ScriptTestCase { } public void testDefNotTypedRet() { - assertEquals((double)~1, exec("def x = (byte)1; double y = ~x; return y;")); - assertEquals((float)~1, exec("def x = (short)1; float y = ~x; return y;")); - assertEquals((long)~1, exec("def x = (char)1; long y = ~x; return y;")); + assertEquals((double) ~1, exec("def x = (byte)1; double y = ~x; return y;")); + assertEquals((float) ~1, exec("def x = (short)1; float y = ~x; return y;")); + assertEquals((long) ~1, exec("def x = (char)1; long y = ~x; return y;")); assertEquals(~1, exec("def x = 1; int y = ~x; return y;")); } @@ -92,9 +92,9 @@ public class UnaryTests extends ScriptTestCase { } public void testDefNegTypedRet() { - assertEquals((double)-1, exec("def x = (byte)1; double y = -x; return y;")); - assertEquals((float)-1, exec("def x = (short)1; float y = -x; return y;")); - assertEquals((long)-1, exec("def x = (char)1; long y = -x; return y;")); + assertEquals((double) -1, exec("def x = (byte)1; double y = -x; return y;")); + assertEquals((float) -1, exec("def x = (short)1; float y = -x; return y;")); + assertEquals((long) -1, exec("def x = (char)1; long y = -x; return y;")); assertEquals(-1, exec("def x = 1; int y = -x; return y;")); } @@ -109,9 +109,9 @@ public class UnaryTests extends ScriptTestCase { } public void testDefPlusTypedRet() { - assertEquals((double)-1, exec("def x = (byte)-1; double y = +x; return y;")); - assertEquals((float)-1, exec("def x = (short)-1; float y = +x; return y;")); - assertEquals((long)65535, exec("def x = (char)-1; long y = +x; return y;")); + assertEquals((double) -1, exec("def x = (byte)-1; double y = +x; return y;")); + assertEquals((float) -1, exec("def x = (short)-1; float y = +x; return y;")); + assertEquals((long) 65535, exec("def x = (char)-1; long y = +x; return y;")); assertEquals(-1, exec("def x = -1; int y = +x; return y;")); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java 
b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java index edf7269a1b8..d0041b22929 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java @@ -46,12 +46,8 @@ import static org.hamcrest.Matchers.instanceOf; public class WhenThingsGoWrongTests extends ScriptTestCase { public void testNullPointer() { - expectScriptThrows(NullPointerException.class, () -> { - exec("int x = params['missing']; return x;"); - }); - expectScriptThrows(NullPointerException.class, () -> { - exec("Double.parseDouble(params['missing'])"); - }); + expectScriptThrows(NullPointerException.class, () -> { exec("int x = params['missing']; return x;"); }); + expectScriptThrows(NullPointerException.class, () -> { exec("Double.parseDouble(params['missing'])"); }); } /** @@ -59,58 +55,44 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { * numbers are really 1 based character numbers. */ public void testScriptStack() { - for (String type : new String[] {"String", "def "}) { + for (String type : new String[] { "String", "def " }) { // trigger NPE at line 1 of the script - ScriptException exception = expectThrows(ScriptException.class, () -> { - exec(type + " x = null; boolean y = x.isEmpty();\n" + - "return y;"); - }); + ScriptException exception = expectThrows( + ScriptException.class, + () -> { exec(type + " x = null; boolean y = x.isEmpty();\n" + "return y;"); } + ); // null deref at x.isEmpty(), the '.' is offset 30 assertScriptElementColumn(30, exception); - assertScriptStack(exception, - "y = x.isEmpty();\n", - " ^---- HERE"); + assertScriptStack(exception, "y = x.isEmpty();\n", " ^---- HERE"); assertThat(exception.getCause(), instanceOf(NullPointerException.class)); // trigger NPE at line 2 of the script - exception = expectThrows(ScriptException.class, () -> { - exec(type + " x = null;\n" + - "return x.isEmpty();"); - }); + exception = expectThrows(ScriptException.class, () -> { exec(type + " x = null;\n" + "return x.isEmpty();"); }); // null deref at x.isEmpty(), the '.' is offset 25 assertScriptElementColumn(25, exception); - assertScriptStack(exception, - "return x.isEmpty();", - " ^---- HERE"); + assertScriptStack(exception, "return x.isEmpty();", " ^---- HERE"); assertThat(exception.getCause(), instanceOf(NullPointerException.class)); // trigger NPE at line 3 of the script - exception = expectThrows(ScriptException.class, () -> { - exec(type + " x = null;\n" + - type + " y = x;\n" + - "return y.isEmpty();"); - }); + exception = expectThrows( + ScriptException.class, + () -> { exec(type + " x = null;\n" + type + " y = x;\n" + "return y.isEmpty();"); } + ); // null deref at y.isEmpty(), the '.' is offset 39 assertScriptElementColumn(39, exception); - assertScriptStack(exception, - "return y.isEmpty();", - " ^---- HERE"); + assertScriptStack(exception, "return y.isEmpty();", " ^---- HERE"); assertThat(exception.getCause(), instanceOf(NullPointerException.class)); // trigger NPE at line 4 in script (inside conditional) - exception = expectThrows(ScriptException.class, () -> { - exec(type + " x = null;\n" + - "boolean y = false;\n" + - "if (!y) {\n" + - " y = x.isEmpty();\n" + - "}\n" + - "return y;"); - }); + exception = expectThrows( + ScriptException.class, + () -> { + exec(type + " x = null;\n" + "boolean y = false;\n" + "if (!y) {\n" + " y = x.isEmpty();\n" + "}\n" + "return y;"); + } + ); // null deref at x.isEmpty(), the '.' 
is offset 53 assertScriptElementColumn(53, exception); - assertScriptStack(exception, - "y = x.isEmpty();\n}\n", - " ^---- HERE"); + assertScriptStack(exception, "y = x.isEmpty();\n}\n", " ^---- HERE"); assertThat(exception.getCause(), instanceOf(NullPointerException.class)); } } @@ -120,8 +102,9 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { for (int i = 0; i < stackTrace.length; i++) { if (WriterConstants.CLASS_NAME.equals(stackTrace[i].getClassName())) { if (expectedColumn + 1 != stackTrace[i].getLineNumber()) { - AssertionFailedError assertion = new AssertionFailedError("Expected column to be [" + expectedColumn + "] but was [" - + stackTrace[i].getLineNumber() + "]"); + AssertionFailedError assertion = new AssertionFailedError( + "Expected column to be [" + expectedColumn + "] but was [" + stackTrace[i].getLineNumber() + "]" + ); assertion.initCause(exception); throw assertion; } @@ -132,68 +115,52 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { } public void testInvalidShift() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = 15F; x <<= 2; return x;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 15F; x <<= 2; return x;"); }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = 15F; x <<= 2; return x;"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 15F; x <<= 2; return x;"); }); } public void testBogusParameter() { - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { - exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), true); - }); + IllegalArgumentException expected = expectThrows( + IllegalArgumentException.class, + () -> { exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue"), true); } + ); assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); } public void testInfiniteLoops() { - PainlessError expected = expectScriptThrows(PainlessError.class, () -> { - exec("boolean x = true; while (x) {}"); - }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + PainlessError expected = expectScriptThrows(PainlessError.class, () -> { exec("boolean x = true; while (x) {}"); }); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); - expected = expectScriptThrows(PainlessError.class, () -> { - exec("while (true) {int y = 5;}"); - }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + expected = expectScriptThrows(PainlessError.class, () -> { exec("while (true) {int y = 5;}"); }); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); - expected = expectScriptThrows(PainlessError.class, () -> { - exec("while (true) { boolean x = true; while (x) {} }"); - }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + expected = expectScriptThrows(PainlessError.class, () -> { exec("while (true) { boolean x = true; while (x) {} }"); }); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); expected = expectScriptThrows(PainlessError.class, () 
-> { exec("while (true) { boolean x = false; while (x) {} }"); fail("should have hit PainlessError"); }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); expected = expectScriptThrows(PainlessError.class, () -> { exec("boolean x = true; for (;x;) {}"); fail("should have hit PainlessError"); }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); expected = expectScriptThrows(PainlessError.class, () -> { exec("for (;;) {int x = 5;}"); fail("should have hit PainlessError"); }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); expected = expectScriptThrows(PainlessError.class, () -> { exec("def x = true; do {int y = 5;} while (x)"); fail("should have hit PainlessError"); }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); RuntimeException parseException = expectScriptThrows(RuntimeException.class, () -> { exec("try { int x; } catch (PainlessError error) {}", false); @@ -206,42 +173,32 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { // right below limit: ok exec("for (int x = 0; x < 999999; ++x) {}"); - PainlessError expected = expectScriptThrows(PainlessError.class, () -> { - exec("for (int x = 0; x < 1000000; ++x) {}"); - }); - assertTrue(expected.getMessage().contains( - "The maximum number of statements that can be executed in a loop has been reached.")); + PainlessError expected = expectScriptThrows(PainlessError.class, () -> { exec("for (int x = 0; x < 1000000; ++x) {}"); }); + assertTrue(expected.getMessage().contains("The maximum number of statements that can be executed in a loop has been reached.")); } public void testIllegalDynamicMethod() { - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { - exec("def x = 'test'; return x.getClass().toString()"); - }); + IllegalArgumentException expected = expectScriptThrows( + IllegalArgumentException.class, + () -> { exec("def x = 'test'; return x.getClass().toString()"); } + ); assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, getClass/0] not found")); } public void testDynamicNPE() { - expectScriptThrows(NullPointerException.class, () -> { - exec("def x = null; return x.toString()"); - }); + expectScriptThrows(NullPointerException.class, () -> { exec("def x = null; return x.toString()"); }); } public void testDynamicWrongArgs() { - expectScriptThrows(WrongMethodTypeException.class, () -> { - exec("def x = new ArrayList(); return x.get('bogus');"); - }); + expectScriptThrows(WrongMethodTypeException.class, () -> { exec("def x = new ArrayList(); return x.get('bogus');"); }); } public void testDynamicArrayWrongIndex() { - expectScriptThrows(WrongMethodTypeException.class, () -> { - exec("def x = new long[1]; 
x[0]=1; return x['bogus'];"); - }); + expectScriptThrows(WrongMethodTypeException.class, () -> { exec("def x = new long[1]; x[0]=1; return x['bogus'];"); }); } public void testDynamicListWrongIndex() { - expectScriptThrows(WrongMethodTypeException.class, () -> { - exec("def x = new ArrayList(); x.add('foo'); return x['bogus'];"); - }); + expectScriptThrows(WrongMethodTypeException.class, () -> { exec("def x = new ArrayList(); x.add('foo'); return x['bogus'];"); }); } /** @@ -249,35 +206,34 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { * the parser with right-curly brackets to allow statements to be delimited by them at the end of blocks. */ public void testRCurlyNotDelim() { - IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { - // We don't want PICKY here so we get the normal error message - exec("def i = 1} return 1", emptyMap(), emptyMap(), false); - }); + IllegalArgumentException e = expectScriptThrows( + IllegalArgumentException.class, + () -> { + // We don't want PICKY here so we get the normal error message + exec("def i = 1} return 1", emptyMap(), emptyMap(), false); + } + ); assertEquals("unexpected token ['}'] was expecting one of [{, ';'}].", e.getMessage()); } public void testBadBoxingCast() { - expectScriptThrows(ClassCastException.class, () -> { - exec("BitSet bs = new BitSet(); bs.and(2);"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("BitSet bs = new BitSet(); bs.and(2);"); }); } public void testOutOfMemoryError() { assumeTrue("test only happens to work for sure on oracle jre", Constants.JAVA_VENDOR.startsWith("Oracle")); - expectScriptThrows(OutOfMemoryError.class, () -> { - exec("int[] x = new int[Integer.MAX_VALUE - 1];"); - }); + expectScriptThrows(OutOfMemoryError.class, () -> { exec("int[] x = new int[Integer.MAX_VALUE - 1];"); }); } public void testStackOverflowError() { - expectScriptThrows(StackOverflowError.class, () -> { - exec("void recurse(int x, int y) {recurse(x, y)} recurse(1, 2);"); - }); + expectScriptThrows(StackOverflowError.class, () -> { exec("void recurse(int x, int y) {recurse(x, y)} recurse(1, 2);"); }); } public void testCanNotOverrideRegexEnabled() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> exec("", null, singletonMap(CompilerSettings.REGEX_ENABLED.getKey(), "true"), false)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> exec("", null, singletonMap(CompilerSettings.REGEX_ENABLED.getKey(), "true"), false) + ); assertEquals("[painless.regex.enabled] can only be set on node startup.", e.getMessage()); } @@ -286,8 +242,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { assertEquals("Invalid int constant [864000000000]. If you want a long constant then change it to [864000000000L].", e.getMessage()); assertEquals(864000000000L, exec("return 864000000000L")); e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return -864000000000")); - assertEquals("Invalid int constant [-864000000000]. If you want a long constant then change it to [-864000000000L].", - e.getMessage()); + assertEquals( + "Invalid int constant [-864000000000]. 
If you want a long constant then change it to [-864000000000L].", + e.getMessage() + ); assertEquals(-864000000000L, exec("return -864000000000L")); // If it isn't a valid long we don't give any suggestions @@ -304,11 +262,15 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testBadStringEscape() { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> exec("'\\a'", false)); - assertEquals("unexpected character ['\\a]. The only valid escape sequences in strings starting with ['] are [\\\\] and [\\'].", - e.getMessage()); + assertEquals( + "unexpected character ['\\a]. The only valid escape sequences in strings starting with ['] are [\\\\] and [\\'].", + e.getMessage() + ); e = expectScriptThrows(IllegalArgumentException.class, () -> exec("\"\\a\"", false)); - assertEquals("unexpected character [\"\\a]. The only valid escape sequences in strings starting with [\"] are [\\\\] and [\\\"].", - e.getMessage()); + assertEquals( + "unexpected character [\"\\a]. The only valid escape sequences in strings starting with [\"] are [\\\\] and [\\\"].", + e.getMessage() + ); } public void testRegularUnexpectedCharacter() { @@ -726,20 +688,26 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { // brace access iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("java.util.List[0]")); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("java.util.List[] x = new java.util.List[1]; x[java.util.List]")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("java.util.List[] x = new java.util.List[1]; x[java.util.List]") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("def x = new java.util.List[1]; x[java.util.List]")); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("Map x = new HashMap(); x[java.util.List]")); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("java.util.List x = new java.util.ArrayList(); x[java.util.List]")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("java.util.List x = new java.util.ArrayList(); x[java.util.List]") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); // method call - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("java.util.List x = new java.util.ArrayList(); x.add(java.util.List)")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("java.util.List x = new java.util.ArrayList(); x.add(java.util.List)") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("def x = new java.util.ArrayList(); x.add(java.util.List)")); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); @@ -765,8 +733,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { // dot access iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("java.util.List[0]")); assertEquals(iae.getMessage(), "value required: instead found unexpected 
type [java.util.List]"); - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("java.util.List[] x = new java.util.List[1]; x[java.util.List]")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("java.util.List[] x = new java.util.List[1]; x[java.util.List]") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); // elvis @@ -824,8 +794,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); // for - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("for (java.util.List x = java.util.List;;) {java.util.List x = 1;}")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("for (java.util.List x = java.util.List;;) {java.util.List x = 1;}") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("for (;java.util.List;) {java.util.List x = 1;}")); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); @@ -837,8 +809,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); // if/else - iae = expectScriptThrows(IllegalArgumentException.class, () -> - exec("if (java.util.List) {java.util.List x = 1;} else {java.util.List x = 2;}")); + iae = expectScriptThrows( + IllegalArgumentException.class, + () -> exec("if (java.util.List) {java.util.List x = 1;} else {java.util.List x = 2;}") + ); assertEquals(iae.getMessage(), "value required: instead found unexpected type [java.util.List]"); // return @@ -855,8 +829,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { } public void testInvalidNullSafeBehavior() { - expectScriptThrows(ClassCastException.class, () -> - exec("def test = ['hostname': 'somehostname']; test?.hostname && params.host.hostname != ''")); + expectScriptThrows( + ClassCastException.class, + () -> exec("def test = ['hostname': 'somehostname']; test?.hostname && params.host.hostname != ''") + ); expectScriptThrows(NullPointerException.class, () -> exec("params?.host?.hostname && params.host?.hostname != ''")); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java index 144ea449458..1f5b252cb74 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhitelistLoaderTests.java @@ -47,17 +47,19 @@ public class WhitelistLoaderTests extends ScriptTestCase { public void testUnknownAnnotations() { Map parsers = new HashMap<>(WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS); - RuntimeException expected = expectThrows(RuntimeException.class, () -> { - WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation.unknown"); - }); - assertEquals( - "invalid annotation: parser not found for [unknownAnnotation] [@unknownAnnotation]", expected.getCause().getMessage() + RuntimeException expected = expectThrows( + RuntimeException.class, + () -> { WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation.unknown"); } ); + assertEquals("invalid annotation: parser not 
found for [unknownAnnotation] [@unknownAnnotation]", expected.getCause().getMessage()); assertEquals(IllegalArgumentException.class, expected.getCause().getClass()); - expected = expectThrows(RuntimeException.class, () -> { - WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation.unknown_with_options"); - }); + expected = expectThrows( + RuntimeException.class, + () -> { + WhitelistLoader.loadFromResourceFiles(Whitelist.class, parsers, "org.opensearch.painless.annotation.unknown_with_options"); + } + ); assertEquals( "invalid annotation: parser not found for [unknownAnootationWithMessage] [@unknownAnootationWithMessage[arg=\"arg value\"]]", expected.getCause().getMessage() @@ -82,16 +84,18 @@ public class WhitelistLoaderTests extends ScriptTestCase { for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { if ("deprecatedMethod".equals(whitelistMethod.methodName)) { - assertEquals("use another method", - ((DeprecatedAnnotation)whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage()); + assertEquals( + "use another method", + ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ); assertEquals(1, whitelistMethod.painlessAnnotations.size()); ++count; } if ("annotatedTestMethod".equals(whitelistMethod.methodName)) { - AnnotationTestObject.TestAnnotation ta = - ((AnnotationTestObject.TestAnnotation)whitelistMethod.painlessAnnotations.get( - AnnotationTestObject.TestAnnotation.class)); + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + AnnotationTestObject.TestAnnotation.class + )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); @@ -100,11 +104,13 @@ public class WhitelistLoaderTests extends ScriptTestCase { } if ("annotatedMultipleMethod".equals(whitelistMethod.methodName)) { - assertEquals("test", - ((DeprecatedAnnotation)whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage()); - AnnotationTestObject.TestAnnotation ta = - ((AnnotationTestObject.TestAnnotation)whitelistMethod.painlessAnnotations.get( - AnnotationTestObject.TestAnnotation.class)); + assertEquals( + "test", + ((DeprecatedAnnotation) whitelistMethod.painlessAnnotations.get(DeprecatedAnnotation.class)).getMessage() + ); + AnnotationTestObject.TestAnnotation ta = ((AnnotationTestObject.TestAnnotation) whitelistMethod.painlessAnnotations.get( + AnnotationTestObject.TestAnnotation.class + )); assertEquals("one", ta.getOne()); assertEquals("two", ta.getTwo()); assertEquals("three", ta.getThree()); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/XorTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/XorTests.java index 124f0dac7e0..83dfa7f8fec 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/XorTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/XorTests.java @@ -81,21 +81,13 @@ public class XorTests extends ScriptTestCase { } public void testIllegal() throws Exception { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; int y = 1; return x ^ y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = (double)4; int y = 1; return x ^ y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; int y = 1; return x ^ y"); }); + 
expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; int y = 1; return x ^ y"); }); } public void testDef() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; def y = (byte)1; return x ^ y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; def y = (byte)1; return x ^ y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; def y = (byte)1; return x ^ y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; def y = (byte)1; return x ^ y"); }); assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (short)4; def y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (char)4; def y = (byte)1; return x ^ y")); @@ -133,18 +125,14 @@ public class XorTests extends ScriptTestCase { assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y")); assertEquals(false, exec("def x = true; def y = true; return x ^ y")); - assertEquals(true, exec("def x = true; def y = false; return x ^ y")); - assertEquals(true, exec("def x = false; def y = true; return x ^ y")); + assertEquals(true, exec("def x = true; def y = false; return x ^ y")); + assertEquals(true, exec("def x = false; def y = true; return x ^ y")); assertEquals(false, exec("def x = false; def y = false; return x ^ y")); } public void testDefTypedLHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = (float)4; def y = (byte)1; return x ^ y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = (double)4; def y = (byte)1; return x ^ y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; def y = (byte)1; return x ^ y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; def y = (byte)1; return x ^ y"); }); assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (short)4; def y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (char)4; def y = (byte)1; return x ^ y")); @@ -182,18 +170,14 @@ public class XorTests extends ScriptTestCase { assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y")); assertEquals(false, exec("def x = true; def y = true; return x ^ y")); - assertEquals(true, exec("def x = true; def y = false; return x ^ y")); - assertEquals(true, exec("def x = false; def y = true; return x ^ y")); + assertEquals(true, exec("def x = true; def y = false; return x ^ y")); + assertEquals(true, exec("def x = false; def y = true; return x ^ y")); assertEquals(false, exec("def x = false; def y = false; return x ^ y")); } public void testDefTypedRHS() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (float)4; byte y = (byte)1; return x ^ y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = (double)4; byte y = (byte)1; return x ^ y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; byte y = (byte)1; return x ^ y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; byte y = (byte)1; return x ^ y"); }); assertEquals(5, exec("def x = (byte)4; byte y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (short)4; byte y = (byte)1; return x ^ y")); assertEquals(5, exec("def x = (char)4; byte y = (byte)1; return x ^ y")); @@ -231,8 +215,8 @@ public class XorTests extends ScriptTestCase { assertEquals(5L, 
exec("def x = (long)4; long y = (long)1; return x ^ y")); assertEquals(false, exec("def x = true; boolean y = true; return x ^ y")); - assertEquals(true, exec("def x = true; boolean y = false; return x ^ y")); - assertEquals(true, exec("def x = false; boolean y = true; return x ^ y")); + assertEquals(true, exec("def x = true; boolean y = false; return x ^ y")); + assertEquals(true, exec("def x = false; boolean y = true; return x ^ y")); assertEquals(false, exec("def x = false; boolean y = false; return x ^ y")); } @@ -260,18 +244,10 @@ public class XorTests extends ScriptTestCase { } public void testBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("float x = 4; int y = 1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("double x = 4; int y = 1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; float y = 1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; double y = 1; x ^= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("float x = 4; int y = 1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("double x = 4; int y = 1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; float y = 1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; double y = 1; x ^= y"); }); } public void testCompoundAssignmentDef() { @@ -298,17 +274,9 @@ public class XorTests extends ScriptTestCase { } public void testDefBogusCompoundAssignment() { - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4F; int y = 1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("def x = 4D; int y = 1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; def y = (float)1; x ^= y"); - }); - expectScriptThrows(ClassCastException.class, () -> { - exec("int x = 4; def y = (double)1; x ^= y"); - }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4F; int y = 1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4D; int y = 1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = (float)1; x ^= y"); }); + expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = (double)1; x ^= y"); }); } } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/action/ContextInfoTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/action/ContextInfoTests.java index d32cf726924..9e3f8794b9c 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/action/ContextInfoTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/action/ContextInfoTests.java @@ -60,9 +60,7 @@ public class ContextInfoTests extends AbstractSerializingTestCase staticFields = new ArrayList<>(); for (int staticField = 0; staticField < staticFieldsSize; ++staticField) { - staticFields.add(new PainlessContextFieldInfo( + staticFields.add( + new PainlessContextFieldInfo( randomAlphaOfLength(randomIntBetween(4, 10)), randomAlphaOfLength(randomIntBetween(4, 10)), - randomAlphaOfLength(randomIntBetween(4, 10)))); + randomAlphaOfLength(randomIntBetween(4, 10)) + ) + ); } int fieldsSize = randomInt(4); List fields = new ArrayList<>(); for (int field = 0; field < fieldsSize; ++field) { - fields.add(new PainlessContextFieldInfo( + fields.add( + new 
PainlessContextFieldInfo( randomAlphaOfLength(randomIntBetween(4, 10)), randomAlphaOfLength(randomIntBetween(4, 10)), - randomAlphaOfLength(randomIntBetween(4, 10)))); + randomAlphaOfLength(randomIntBetween(4, 10)) + ) + ); } - classes.add(new PainlessContextClassInfo( - randomAlphaOfLength(randomIntBetween(3, 200)), randomBoolean(), - constructors, staticMethods, methods, fields, staticFields)); + classes.add( + new PainlessContextClassInfo( + randomAlphaOfLength(randomIntBetween(3, 200)), + randomBoolean(), + constructors, + staticMethods, + methods, + fields, + staticFields + ) + ); } int importedMethodsSize = randomInt(4); @@ -127,11 +145,14 @@ public class ContextInfoTests extends AbstractSerializingTestCase { - Request r = new Request(new Script(ScriptType.INLINE, - "painless", "params.count / params.total + doc['constant']", params), null, null); + Exception e = expectThrows(ScriptException.class, () -> { + Request r = new Request( + new Script(ScriptType.INLINE, "painless", "params.count / params.total + doc['constant']", params), + null, + null + ); innerShardOperation(r, scriptService, null); }); assertThat(e.getCause().getMessage(), equalTo("cannot resolve symbol [doc]")); @@ -97,15 +99,21 @@ public class PainlessExecuteApiTests extends OpenSearchSingleNodeTestCase { contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); contextSetup.setXContentType(XContentType.JSON); - request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", - singletonMap("max", 3)), "filter", contextSetup); + request = new Request( + new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", singletonMap("max", 3)), + "filter", + contextSetup + ); response = innerShardOperation(request, scriptService, indexService); assertThat(response.getResult(), equalTo(true)); contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 2}"), null); contextSetup.setXContentType(XContentType.JSON); - request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", - singletonMap("max", 3)), "filter", contextSetup); + request = new Request( + new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", singletonMap("max", 3)), + "filter", + contextSetup + ); response = innerShardOperation(request, scriptService, indexService); assertThat(response.getResult(), equalTo(false)); } @@ -114,12 +122,22 @@ public class PainlessExecuteApiTests extends OpenSearchSingleNodeTestCase { ScriptService scriptService = getInstanceFromNode(ScriptService.class); IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text"); - Request.ContextSetup contextSetup = new Request.ContextSetup("index", - new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"), new MatchQueryBuilder("text", "fox")); + Request.ContextSetup contextSetup = new Request.ContextSetup( + "index", + new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"), + new MatchQueryBuilder("text", "fox") + ); contextSetup.setXContentType(XContentType.JSON); - Request request = new Request(new Script(ScriptType.INLINE, "painless", - "Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", singletonMap("max_rank", 5.0)), "score", - contextSetup); + Request request = new Request( + new Script( + ScriptType.INLINE, + "painless", + "Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", + singletonMap("max_rank", 
5.0) + ), + "score", + contextSetup + ); Response response = innerShardOperation(request, scriptService, indexService); assertThat(response.getResult(), equalTo(0.93D)); } diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/action/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/action/PainlessExecuteRequestTests.java index 3000bee8949..7e3ba2b8b27 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/action/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/action/PainlessExecuteRequestTests.java @@ -115,7 +115,7 @@ public class PainlessExecuteRequestTests extends AbstractWireSerializingTestCase assertEquals("Validation Failed: 1: only inline scripts are supported;", e.getMessage()); } - private static ContextSetup randomContextSetup() { + private static ContextSetup randomContextSetup() { String index = randomBoolean() ? randomAlphaOfLength(4) : null; QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; BytesReference doc = null; diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java index 701d2164ddb..a7787f4bc3c 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java @@ -39,14 +39,10 @@ import java.util.regex.Pattern; public class LimitedCharSequenceTests extends OpenSearchTestCase { public void testBadFactor() { - IllegalArgumentException badArg = expectThrows(IllegalArgumentException.class, - () -> new LimitedCharSequence("abc", null, -1) - ); + IllegalArgumentException badArg = expectThrows(IllegalArgumentException.class, () -> new LimitedCharSequence("abc", null, -1)); assertEquals("limitFactor must be positive", badArg.getMessage()); - badArg = expectThrows(IllegalArgumentException.class, - () -> new LimitedCharSequence("abc", null, 0) - ); + badArg = expectThrows(IllegalArgumentException.class, () -> new LimitedCharSequence("abc", null, 0)); assertEquals("limitFactor must be positive", badArg.getMessage()); } @@ -57,9 +53,9 @@ public class LimitedCharSequenceTests extends OpenSearchTestCase { public void testCharAtEqualLimit() { String str = "abc"; - for (int limitFactor=1; limitFactor < 4; limitFactor++){ + for (int limitFactor = 1; limitFactor < 4; limitFactor++) { CharSequence seq = new LimitedCharSequence(str, null, limitFactor); - for (int i=0; i seq.charAt(0)); assertEquals( - "[scripting] Regular expression considered too many characters, " + - "pattern: [a.*bc], " + - "limit factor: [2], " + - "char limit: [6], " + - "count: [7], " + - "wrapped: [abc], " + - "this limit can be changed by changed by the [script.painless.regex.limit-factor] setting", - circuitBreakingException.getMessage()); + "[scripting] Regular expression considered too many characters, " + + "pattern: [a.*bc], " + + "limit factor: [2], " + + "char limit: [6], " + + "count: [7], " + + "wrapped: [abc], " + + "this limit can be changed by changed by the [script.painless.regex.limit-factor] setting", + circuitBreakingException.getMessage() + ); final CharSequence seqNullPattern = new LimitedCharSequence(str, null, 2); for (int i = 0; i < 6; i++) { @@ -90,13 +87,14 @@ public class LimitedCharSequenceTests extends OpenSearchTestCase { } circuitBreakingException = 
expectThrows(CircuitBreakingException.class, () -> seqNullPattern.charAt(0)); assertEquals( - "[scripting] Regular expression considered too many characters, " + - "limit factor: [2], " + - "char limit: [6], " + - "count: [7], " + - "wrapped: [abc], " + - "this limit can be changed by changed by the [script.painless.regex.limit-factor] setting", - circuitBreakingException.getMessage()); + "[scripting] Regular expression considered too many characters, " + + "limit factor: [2], " + + "char limit: [6], " + + "count: [7], " + + "wrapped: [abc], " + + "this limit can be changed by changed by the [script.painless.regex.limit-factor] setting", + circuitBreakingException.getMessage() + ); } public void testSubSequence() { diff --git a/modules/lang-painless/src/yamlRestTest/java/org/opensearch/painless/LangPainlessClientYamlTestSuiteIT.java b/modules/lang-painless/src/yamlRestTest/java/org/opensearch/painless/LangPainlessClientYamlTestSuiteIT.java index fb24932574c..520c6567231 100644 --- a/modules/lang-painless/src/yamlRestTest/java/org/opensearch/painless/LangPainlessClientYamlTestSuiteIT.java +++ b/modules/lang-painless/src/yamlRestTest/java/org/opensearch/painless/LangPainlessClientYamlTestSuiteIT.java @@ -49,4 +49,3 @@ public class LangPainlessClientYamlTestSuiteIT extends OpenSearchClientYamlSuite return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 089ec8116fe..d400fb69f34 100644 --- a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -73,8 +73,10 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase private final boolean storeCountedFields; private final boolean loadCountedFields; - public TokenCountFieldMapperIntegrationIT(@Name("storeCountedFields") boolean storeCountedFields, - @Name("loadCountedFields") boolean loadCountedFields) { + public TokenCountFieldMapperIntegrationIT( + @Name("storeCountedFields") boolean storeCountedFields, + @Name("loadCountedFields") boolean loadCountedFields + ) { this.storeCountedFields = storeCountedFields; this.loadCountedFields = loadCountedFields; } @@ -112,10 +114,8 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase public void testFacetByTokenCount() throws IOException { init(); - String facetField = randomFrom(Arrays.asList( - "foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values")); - SearchResponse result = searchByNumericRange(1, 10) - .addAggregation(AggregationBuilders.terms("facet").field(facetField)).get(); + String facetField = randomFrom(Arrays.asList("foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values")); + SearchResponse result = searchByNumericRange(1, 10).addAggregation(AggregationBuilders.terms("facet").field(facetField)).get(); assertSearchReturns(result, "single", "bulk1", "bulk2", "multi", "multibulk1", "multibulk2"); assertThat(result.getAggregations().asList().size(), equalTo(1)); Terms terms = (Terms) result.getAggregations().asList().get(0); @@ -127,52 +127,60 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase settings.put(indexSettings()); 
settings.put("index.analysis.analyzer.mock_english.tokenizer", "standard"); settings.put("index.analysis.analyzer.mock_english.filter", "stop"); - prepareCreate("test") - .setSettings(settings) - .addMapping("test", jsonBuilder().startObject() - .startObject("test") + prepareCreate("test").setSettings(settings) + .addMapping( + "test", + jsonBuilder().startObject() + .startObject("test") .startObject("properties") - .startObject("foo") - .field("type", "text") - .field("store", storeCountedFields) - .field("analyzer", "simple") - .startObject("fields") - .startObject("token_count") - .field("type", "token_count") - .field("analyzer", "standard") - .field("store", true) - .endObject() - .startObject("token_count_unstored") - .field("type", "token_count") - .field("analyzer", "standard") - .endObject() - .startObject("token_count_with_doc_values") - .field("type", "token_count") - .field("analyzer", "standard") - .field("doc_values", true) - .endObject() - .startObject("token_count_without_position_increments") - .field("type", "token_count") - .field("analyzer", "mock_english") - .field("enable_position_increments", false) - .field("store", true) - .endObject() - .endObject() - .endObject() + .startObject("foo") + .field("type", "text") + .field("store", storeCountedFields) + .field("analyzer", "simple") + .startObject("fields") + .startObject("token_count") + .field("type", "token_count") + .field("analyzer", "standard") + .field("store", true) .endObject() - .endObject().endObject()).get(); + .startObject("token_count_unstored") + .field("type", "token_count") + .field("analyzer", "standard") + .endObject() + .startObject("token_count_with_doc_values") + .field("type", "token_count") + .field("analyzer", "standard") + .field("doc_values", true) + .endObject() + .startObject("token_count_without_position_increments") + .field("type", "token_count") + .field("analyzer", "mock_english") + .field("enable_position_increments", false) + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) + .get(); ensureGreen(); assertEquals(DocWriteResponse.Result.CREATED, prepareIndex("single", "I have four terms").get().getResult()); BulkResponse bulk = client().prepareBulk() - .add(prepareIndex("bulk1", "bulk three terms")) - .add(prepareIndex("bulk2", "this has five bulk terms")).get(); + .add(prepareIndex("bulk1", "bulk three terms")) + .add(prepareIndex("bulk2", "this has five bulk terms")) + .get(); assertFalse(bulk.buildFailureMessage(), bulk.hasFailures()); - assertEquals(DocWriteResponse.Result.CREATED, - prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getResult()); + assertEquals( + DocWriteResponse.Result.CREATED, + prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getResult() + ); bulk = client().prepareBulk() - .add(prepareIndex("multibulk1", "one", "oh wow now I have eight unlucky terms")) - .add(prepareIndex("multibulk2", "six is a bunch of terms", "ten! ten terms is just crazy! too many too count!")).get(); + .add(prepareIndex("multibulk1", "one", "oh wow now I have eight unlucky terms")) + .add(prepareIndex("multibulk2", "six is a bunch of terms", "ten! ten terms is just crazy! 
too many too count!")) + .get(); assertFalse(bulk.buildFailureMessage(), bulk.hasFailures()); assertThat(refresh().getFailedShards(), equalTo(0)); @@ -187,9 +195,11 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase } private SearchRequestBuilder searchByNumericRange(int low, int high) { - return prepareSearch().setQuery(QueryBuilders.rangeQuery(randomFrom( - Arrays.asList("foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values") - )).gte(low).lte(high)); + return prepareSearch().setQuery( + QueryBuilders.rangeQuery( + randomFrom(Arrays.asList("foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values")) + ).gte(low).lte(high) + ); } private SearchRequestBuilder prepareSearch() { @@ -213,17 +223,17 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase for (SearchHit hit : result.getHits()) { String id = hit.getId(); if (id.equals("single")) { - assertSearchHit(hit, new int[]{4}, new int[]{4}); + assertSearchHit(hit, new int[] { 4 }, new int[] { 4 }); } else if (id.equals("bulk1")) { - assertSearchHit(hit, new int[]{3}, new int[]{3}); + assertSearchHit(hit, new int[] { 3 }, new int[] { 3 }); } else if (id.equals("bulk2")) { - assertSearchHit(hit, new int[]{5}, new int[]{4}); + assertSearchHit(hit, new int[] { 5 }, new int[] { 4 }); } else if (id.equals("multi")) { - assertSearchHit(hit, new int[]{2, 7}, new int[]{2, 7}); + assertSearchHit(hit, new int[] { 2, 7 }, new int[] { 2, 7 }); } else if (id.equals("multibulk1")) { - assertSearchHit(hit, new int[]{1, 8}, new int[]{1, 8}); + assertSearchHit(hit, new int[] { 1, 8 }, new int[] { 1, 8 }); } else if (id.equals("multibulk2")) { - assertSearchHit(hit, new int[]{6, 10}, new int[]{3, 9}); + assertSearchHit(hit, new int[] { 6, 10 }, new int[] { 3, 9 }); } else { throw new OpenSearchException("Unexpected response!"); } @@ -240,8 +250,7 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase assertThat(hit.field("foo.token_count_without_position_increments"), not(nullValue())); assertThat(hit.field("foo.token_count_without_position_increments").getValues().size(), equalTo(englishTermCounts.length)); for (int i = 0; i < englishTermCounts.length; i++) { - assertThat(hit.field("foo.token_count_without_position_increments").getValues().get(i), - equalTo(englishTermCounts[i])); + assertThat(hit.field("foo.token_count_without_position_increments").getValues().get(i), equalTo(englishTermCounts[i])); } if (loadCountedFields && storeCountedFields) { diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/MapperExtrasPlugin.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/MapperExtrasPlugin.java index bfb7987e7a7..ebb86bc0558 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/MapperExtrasPlugin.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/MapperExtrasPlugin.java @@ -64,8 +64,8 @@ public class MapperExtrasPlugin extends Plugin implements MapperPlugin, SearchPl @Override public List> getQueries() { return Collections.singletonList( - new QuerySpec<>(RankFeatureQueryBuilder.NAME, RankFeatureQueryBuilder::new, - p -> RankFeatureQueryBuilder.PARSER.parse(p, null))); + new QuerySpec<>(RankFeatureQueryBuilder.NAME, RankFeatureQueryBuilder::new, p -> RankFeatureQueryBuilder.PARSER.parse(p, null)) + ); } } diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java 
b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java index 975ef2d55b9..6f64be86f93 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeatureFieldMapper.java @@ -69,13 +69,17 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper { } private static RankFeatureFieldType ft(FieldMapper in) { - return ((RankFeatureFieldMapper)in).fieldType(); + return ((RankFeatureFieldMapper) in).fieldType(); } public static class Builder extends ParametrizedFieldMapper.Builder { - private final Parameter positiveScoreImpact - = Parameter.boolParam("positive_score_impact", false, m -> ft(m).positiveScoreImpact, true); + private final Parameter positiveScoreImpact = Parameter.boolParam( + "positive_score_impact", + false, + m -> ft(m).positiveScoreImpact, + true + ); private final Parameter> meta = Parameter.metaParam(); public Builder(String name) { @@ -89,9 +93,13 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper { @Override public RankFeatureFieldMapper build(BuilderContext context) { - return new RankFeatureFieldMapper(name, + return new RankFeatureFieldMapper( + name, new RankFeatureFieldType(buildFullName(context), meta.getValue(), positiveScoreImpact.getValue()), - multiFieldsBuilder.build(this, context), copyTo.build(), positiveScoreImpact.getValue()); + multiFieldsBuilder.build(this, context), + copyTo.build(), + positiveScoreImpact.getValue() + ); } } @@ -147,8 +155,13 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper { private final boolean positiveScoreImpact; - private RankFeatureFieldMapper(String simpleName, MappedFieldType mappedFieldType, - MultiFields multiFields, CopyTo copyTo, boolean positiveScoreImpact) { + private RankFeatureFieldMapper( + String simpleName, + MappedFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + boolean positiveScoreImpact + ) { super(simpleName, mappedFieldType, multiFields, copyTo); this.positiveScoreImpact = positiveScoreImpact; } @@ -177,8 +190,9 @@ public class RankFeatureFieldMapper extends ParametrizedFieldMapper { } if (context.doc().getByKey(name()) != null) { - throw new IllegalArgumentException("[rank_feature] fields do not support indexing multiple values for the same field [" + - name() + "] in the same document"); + throw new IllegalArgumentException( + "[rank_feature] fields do not support indexing multiple values for the same field [" + name() + "] in the same document" + ); } if (positiveScoreImpact == false) { diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java index f2bc8dfaa31..b35f790901f 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java @@ -72,8 +72,11 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper { @Override public RankFeaturesFieldMapper build(BuilderContext context) { return new RankFeaturesFieldMapper( - name, new RankFeaturesFieldType(buildFullName(context), meta.getValue()), - multiFieldsBuilder.build(this, context), copyTo.build()); + name, + new RankFeaturesFieldType(buildFullName(context), meta.getValue()), + multiFieldsBuilder.build(this, context), + copyTo.build() 
+ ); } } @@ -112,8 +115,7 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper { } } - private RankFeaturesFieldMapper(String simpleName, MappedFieldType mappedFieldType, - MultiFields multiFields, CopyTo copyTo) { + private RankFeaturesFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) { super(simpleName, mappedFieldType, multiFields, copyTo); assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; } @@ -140,8 +142,9 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper { } if (context.parser().currentToken() != Token.START_OBJECT) { - throw new IllegalArgumentException("[rank_features] fields must be json objects, expected a START_OBJECT but got: " + - context.parser().currentToken()); + throw new IllegalArgumentException( + "[rank_features] fields must be json objects, expected a START_OBJECT but got: " + context.parser().currentToken() + ); } String feature = null; @@ -154,13 +157,20 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper { final String key = name() + "." + feature; float value = context.parser().floatValue(true); if (context.doc().getByKey(key) != null) { - throw new IllegalArgumentException("[rank_features] fields do not support indexing multiple values for the same " + - "rank feature [" + key + "] in the same document"); + throw new IllegalArgumentException( + "[rank_features] fields do not support indexing multiple values for the same " + + "rank feature [" + + key + + "] in the same document" + ); } context.doc().addWithKey(key, new FeatureField(name(), feature, value)); } else { - throw new IllegalArgumentException("[rank_features] fields take hashes that map a feature to a strictly positive " + - "float, but got unexpected token " + token); + throw new IllegalArgumentException( + "[rank_features] fields take hashes that map a feature to a strictly positive " + + "float, but got unexpected token " + + token + ); } } } diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java index a2c01db4592..93f3adf4768 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java @@ -92,18 +92,27 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { private final Parameter> ignoreMalformed; private final Parameter> coerce; - private final Parameter scalingFactor = new Parameter<>("scaling_factor", false, () -> null, - (n, c, o) -> XContentMapValues.nodeDoubleValue(o), m -> toType(m).scalingFactor) - .setValidator(v -> { - if (v == null) { - throw new IllegalArgumentException("Field [scaling_factor] is required"); - } - if (Double.isFinite(v) == false || v <= 0) { - throw new IllegalArgumentException("[scaling_factor] must be a positive number, got [" + v + "]"); - } - }); - private final Parameter nullValue = new Parameter<>("null_value", false, () -> null, - (n, c, o) -> o == null ? 
null : XContentMapValues.nodeDoubleValue(o), m -> toType(m).nullValue).acceptsNull(); + private final Parameter scalingFactor = new Parameter<>( + "scaling_factor", + false, + () -> null, + (n, c, o) -> XContentMapValues.nodeDoubleValue(o), + m -> toType(m).scalingFactor + ).setValidator(v -> { + if (v == null) { + throw new IllegalArgumentException("Field [scaling_factor] is required"); + } + if (Double.isFinite(v) == false || v <= 0) { + throw new IllegalArgumentException("[scaling_factor] must be a positive number, got [" + v + "]"); + } + }); + private final Parameter nullValue = new Parameter<>( + "null_value", + false, + () -> null, + (n, c, o) -> o == null ? null : XContentMapValues.nodeDoubleValue(o), + m -> toType(m).nullValue + ).acceptsNull(); private final Parameter> meta = Parameter.metaParam(); @@ -113,10 +122,13 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { public Builder(String name, boolean ignoreMalformedByDefault, boolean coerceByDefault) { super(name); - this.ignoreMalformed - = Parameter.explicitBoolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault); - this.coerce - = Parameter.explicitBoolParam("coerce", true, m -> toType(m).coerce, coerceByDefault); + this.ignoreMalformed = Parameter.explicitBoolParam( + "ignore_malformed", + true, + m -> toType(m).ignoreMalformed, + ignoreMalformedByDefault + ); + this.coerce = Parameter.explicitBoolParam("coerce", true, m -> toType(m).coerce, coerceByDefault); } Builder scalingFactor(double scalingFactor) { @@ -136,8 +148,15 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { @Override public ScaledFloatFieldMapper build(BuilderContext context) { - ScaledFloatFieldType type = new ScaledFloatFieldType(buildFullName(context), indexed.getValue(), stored.getValue(), - hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue(), nullValue.getValue()); + ScaledFloatFieldType type = new ScaledFloatFieldType( + buildFullName(context), + indexed.getValue(), + stored.getValue(), + hasDocValues.getValue(), + meta.getValue(), + scalingFactor.getValue(), + nullValue.getValue() + ); return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this); } } @@ -149,8 +168,15 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { private final double scalingFactor; private final Double nullValue; - public ScaledFloatFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, - Map meta, double scalingFactor, Double nullValue) { + public ScaledFloatFieldType( + String name, + boolean indexed, + boolean stored, + boolean hasDocValues, + Map meta, + double scalingFactor, + Double nullValue + ) { super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.scalingFactor = scalingFactor; this.nullValue = nullValue; @@ -268,8 +294,9 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { @Override public DocValueFormat docValueFormat(String format, ZoneId timeZone) { if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); + throw new IllegalArgumentException( + "Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones" + ); } if (format == null) { return DocValueFormat.RAW; @@ -304,11 +331,12 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { private final boolean coerceByDefault; private 
ScaledFloatFieldMapper( - String simpleName, - ScaledFloatFieldType mappedFieldType, - MultiFields multiFields, - CopyTo copyTo, - Builder builder) { + String simpleName, + ScaledFloatFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + Builder builder + ) { super(simpleName, mappedFieldType, multiFields, copyTo); this.indexed = builder.indexed.getValue(); this.hasDocValues = builder.hasDocValues.getValue(); @@ -359,9 +387,7 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { value = context.externalValue(); } else if (parser.currentToken() == Token.VALUE_NULL) { value = null; - } else if (coerce.value() - && parser.currentToken() == Token.VALUE_STRING - && parser.textLength() == 0) { + } else if (coerce.value() && parser.currentToken() == Token.VALUE_STRING && parser.textLength() == 0) { value = null; } else { try { @@ -399,8 +425,7 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { } long scaledValue = Math.round(doubleValue * scalingFactor); - List fields - = NumberFieldMapper.NumberType.LONG.createFields(fieldType().name(), scaledValue, indexed, hasDocValues, stored); + List fields = NumberFieldMapper.NumberType.LONG.createFields(fieldType().name(), scaledValue, indexed, hasDocValues, stored); context.doc().addAll(fields); if (hasDocValues == false && (indexed || stored)) { @@ -530,6 +555,7 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper { public boolean advanceExact(int doc) throws IOException { return singleValues.advanceExact(doc); } + @Override public double doubleValue() throws IOException { return singleValues.longValue() * scalingFactorInverse; diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java index 950bc0a6006..6c55c2ecd0f 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -105,8 +105,7 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { public static final int MAX_SHINGLE_SIZE = 3; } - public static final TypeParser PARSER - = new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers())); + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getIndexAnalyzers())); private static SearchAsYouTypeFieldMapper toType(FieldMapper in) { return (SearchAsYouTypeFieldMapper) in; @@ -124,23 +123,31 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { // This is only here because for some reason the initial impl of this always serialized // `doc_values=false`, even though it cannot be set; and so we need to continue // serializing it forever because of mapper assertions in mixed clusters. 
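// Illustrative sketch: the declare-and-validate pattern used for the locked `doc_values`
// parameter in this builder, written out with the generic type that the rendered diff drops.
// `Parameter`, `Parameter.docValuesParam`, `setValidator`, `alwaysSerialize` and
// `MapperParsingException` are the same APIs the surrounding hunk uses; the variable name
// here is hypothetical.
private final Parameter<Boolean> lockedDocValues = Parameter.docValuesParam(m -> false, false).setValidator(v -> {
    if (v) {
        // an explicit `doc_values: true` in the mapping is rejected at parse time
        throw new MapperParsingException("Cannot set [doc_values] on field of type [search_as_you_type]");
    }
}).alwaysSerialize(); // still serialized as doc_values=false so mixed-cluster mapper assertions keep passing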
- private final Parameter docValues = Parameter.docValuesParam(m -> false, false) - .setValidator(v -> { - if (v) { - throw new MapperParsingException("Cannot set [doc_values] on field of type [search_as_you_type]"); - } - }) - .alwaysSerialize(); + private final Parameter docValues = Parameter.docValuesParam(m -> false, false).setValidator(v -> { + if (v) { + throw new MapperParsingException("Cannot set [doc_values] on field of type [search_as_you_type]"); + } + }).alwaysSerialize(); - private final Parameter maxShingleSize = Parameter.intParam("max_shingle_size", false, - m -> toType(m).maxShingleSize, Defaults.MAX_SHINGLE_SIZE) - .setValidator(v -> { - if (v < MAX_SHINGLE_SIZE_LOWER_BOUND || v > MAX_SHINGLE_SIZE_UPPER_BOUND) { - throw new MapperParsingException("[max_shingle_size] must be at least [" + MAX_SHINGLE_SIZE_LOWER_BOUND - + "] and at most " + "[" + MAX_SHINGLE_SIZE_UPPER_BOUND + "], got [" + v + "]"); - } - }) - .alwaysSerialize(); + private final Parameter maxShingleSize = Parameter.intParam( + "max_shingle_size", + false, + m -> toType(m).maxShingleSize, + Defaults.MAX_SHINGLE_SIZE + ).setValidator(v -> { + if (v < MAX_SHINGLE_SIZE_LOWER_BOUND || v > MAX_SHINGLE_SIZE_UPPER_BOUND) { + throw new MapperParsingException( + "[max_shingle_size] must be at least [" + + MAX_SHINGLE_SIZE_LOWER_BOUND + + "] and at most " + + "[" + + MAX_SHINGLE_SIZE_UPPER_BOUND + + "], got [" + + v + + "]" + ); + } + }).alwaysSerialize(); final TextParams.Analyzers analyzers; final Parameter similarity = TextParams.similarity(m -> ft(m).getTextSearchInfo().getSimilarity()); @@ -158,9 +165,20 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { @Override protected List> getParameters() { - return Arrays.asList(index, store, docValues, maxShingleSize, - analyzers.indexAnalyzer, analyzers.searchAnalyzer, analyzers.searchQuoteAnalyzer, similarity, - indexOptions, norms, termVectors, meta); + return Arrays.asList( + index, + store, + docValues, + maxShingleSize, + analyzers.indexAnalyzer, + analyzers.searchAnalyzer, + analyzers.searchQuoteAnalyzer, + similarity, + indexOptions, + norms, + termVectors, + meta + ); } @Override @@ -175,8 +193,14 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer(); NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer(); - SearchAsYouTypeFieldType ft = new SearchAsYouTypeFieldType(buildFullName(context), fieldType, similarity.getValue(), - analyzers.getSearchAnalyzer(), analyzers.getSearchQuoteAnalyzer(), meta.getValue()); + SearchAsYouTypeFieldType ft = new SearchAsYouTypeFieldType( + buildFullName(context), + fieldType, + similarity.getValue(), + analyzers.getSearchAnalyzer(), + analyzers.getSearchQuoteAnalyzer(), + meta.getValue() + ); ft.setIndexAnalyzer(analyzers.getIndexAnalyzer()); // set up the prefix field @@ -186,15 +210,19 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { prefixft.setStored(false); final String fullName = buildFullName(context); // wrap the root field's index analyzer with shingles and edge ngrams - final Analyzer prefixIndexWrapper = - SearchAsYouTypeAnalyzer.withShingleAndPrefix(indexAnalyzer.analyzer(), maxShingleSize.getValue()); + final Analyzer prefixIndexWrapper = SearchAsYouTypeAnalyzer.withShingleAndPrefix( + indexAnalyzer.analyzer(), + maxShingleSize.getValue() + ); // wrap the root field's search analyzer with only shingles - final NamedAnalyzer prefixSearchWrapper = new 
NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(), - SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), maxShingleSize.getValue())); + final NamedAnalyzer prefixSearchWrapper = new NamedAnalyzer( + searchAnalyzer.name(), + searchAnalyzer.scope(), + SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), maxShingleSize.getValue()) + ); // don't wrap the root field's search quote analyzer as prefix field doesn't support phrase queries TextSearchInfo prefixSearchInfo = new TextSearchInfo(prefixft, similarity.getValue(), prefixSearchWrapper, searchAnalyzer); - final PrefixFieldType prefixFieldType - = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM); + final PrefixFieldType prefixFieldType = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM); prefixFieldType.setIndexAnalyzer(new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, prefixIndexWrapper)); final PrefixFieldMapper prefixFieldMapper = new PrefixFieldMapper(prefixft, prefixFieldType); @@ -207,14 +235,26 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { shingleft.setStored(false); String fieldName = getShingleFieldName(buildFullName(context), shingleSize); // wrap the root field's index, search, and search quote analyzers with shingles - final SearchAsYouTypeAnalyzer shingleIndexWrapper = - SearchAsYouTypeAnalyzer.withShingle(indexAnalyzer.analyzer(), shingleSize); - final NamedAnalyzer shingleSearchWrapper = new NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(), - SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize)); - final NamedAnalyzer shingleSearchQuoteWrapper = new NamedAnalyzer(searchAnalyzer.name(), searchAnalyzer.scope(), - SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize)); - TextSearchInfo textSearchInfo - = new TextSearchInfo(shingleft, similarity.getValue(), shingleSearchWrapper, shingleSearchQuoteWrapper); + final SearchAsYouTypeAnalyzer shingleIndexWrapper = SearchAsYouTypeAnalyzer.withShingle( + indexAnalyzer.analyzer(), + shingleSize + ); + final NamedAnalyzer shingleSearchWrapper = new NamedAnalyzer( + searchAnalyzer.name(), + searchAnalyzer.scope(), + SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize) + ); + final NamedAnalyzer shingleSearchQuoteWrapper = new NamedAnalyzer( + searchAnalyzer.name(), + searchAnalyzer.scope(), + SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), shingleSize) + ); + TextSearchInfo textSearchInfo = new TextSearchInfo( + shingleft, + similarity.getValue(), + shingleSearchWrapper, + shingleSearchQuoteWrapper + ); final ShingleFieldType shingleFieldType = new ShingleFieldType(fieldName, shingleSize, textSearchInfo); shingleFieldType.setIndexAnalyzer(new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, shingleIndexWrapper)); shingleFieldType.setPrefixFieldType(prefixFieldType); @@ -251,10 +291,22 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { PrefixFieldType prefixField; ShingleFieldType[] shingleFields = new ShingleFieldType[0]; - SearchAsYouTypeFieldType(String name, FieldType fieldType, SimilarityProvider similarity, - NamedAnalyzer searchAnalyzer, NamedAnalyzer searchQuoteAnalyzer, Map meta) { - super(name, fieldType.indexOptions() != IndexOptions.NONE, fieldType.stored(), false, - new TextSearchInfo(fieldType, similarity, searchAnalyzer, searchQuoteAnalyzer), meta); + SearchAsYouTypeFieldType( + String name, + FieldType 
fieldType, + SimilarityProvider similarity, + NamedAnalyzer searchAnalyzer, + NamedAnalyzer searchQuoteAnalyzer, + Map meta + ) { + super( + name, + fieldType.indexOptions() != IndexOptions.NONE, + fieldType.stored(), + false, + new TextSearchInfo(fieldType, similarity, searchAnalyzer, searchQuoteAnalyzer), + meta + ); this.fieldType = fieldType; } @@ -323,8 +375,7 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { int numPos = countPosition(stream); if (shingleFields.length == 0 || slop > 0 || hasGaps(stream) || numPos <= 1) { - return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions, - null, null); + return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions, null, null); } final ShingleFieldType shingleField = shingleFieldForPositions(numPos); stream = new FixedShingleFilter(stream, shingleField.shingleSize); @@ -336,8 +387,9 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { if (prefixField != null && prefixField.termLengthWithinBounds(value.length())) { return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixField.name(), indexedValueForSearch(value))), name()); } else { - SpanMultiTermQueryWrapper spanMulti = - new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); + SpanMultiTermQueryWrapper spanMulti = new SpanMultiTermQueryWrapper<>( + new PrefixQuery(new Term(name(), indexedValueForSearch(value))) + ); spanMulti.setRewriteMethod(method); return spanMulti; } @@ -368,7 +420,7 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { @Override public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) { if (value.length() >= minChars) { - if(caseInsensitive) { + if (caseInsensitive) { return super.termQueryCaseInsensitive(value, context); } return super.termQuery(value, context); @@ -381,8 +433,7 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); query.setRewriteMethod(method); - return new BooleanQuery.Builder() - .add(query, BooleanClause.Occur.SHOULD) + return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); } @@ -533,11 +584,15 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { @Override public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { - final String prefixFieldName = slop > 0 - ? null - : prefixFieldType.name(); - return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions, - prefixFieldName, prefixFieldType::termLengthWithinBounds); + final String prefixFieldName = slop > 0 ? 
null : prefixFieldType.name(); + return TextFieldMapper.createPhrasePrefixQuery( + stream, + name(), + slop, + maxExpansions, + prefixFieldName, + prefixFieldType::termLengthWithinBounds + ); } @Override @@ -545,8 +600,9 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { if (prefixFieldType != null && prefixFieldType.termLengthWithinBounds(value.length())) { return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), indexedValueForSearch(value))), name()); } else { - SpanMultiTermQueryWrapper spanMulti = - new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); + SpanMultiTermQueryWrapper spanMulti = new SpanMultiTermQueryWrapper<>( + new PrefixQuery(new Term(name(), indexedValueForSearch(value))) + ); spanMulti.setRewriteMethod(method); return spanMulti; } @@ -564,12 +620,14 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { private final Builder builder; - public SearchAsYouTypeFieldMapper(String simpleName, - SearchAsYouTypeFieldType mappedFieldType, - CopyTo copyTo, - PrefixFieldMapper prefixField, - ShingleFieldMapper[] shingleFields, - Builder builder) { + public SearchAsYouTypeFieldMapper( + String simpleName, + SearchAsYouTypeFieldType mappedFieldType, + CopyTo copyTo, + PrefixFieldMapper prefixField, + ShingleFieldMapper[] shingleFields, + Builder builder + ) { super(simpleName, mappedFieldType, MultiFields.empty(), copyTo); this.prefixField = prefixField; this.shingleFields = shingleFields; @@ -634,7 +692,8 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { List subIterators = new ArrayList<>(); subIterators.add(prefixField); subIterators.addAll(Arrays.asList(shingleFields)); - @SuppressWarnings("unchecked") Iterator concat = Iterators.concat(super.iterator(), subIterators.iterator()); + @SuppressWarnings("unchecked") + Iterator concat = Iterators.concat(super.iterator(), subIterators.iterator()); return concat; } @@ -649,9 +708,7 @@ public class SearchAsYouTypeFieldMapper extends ParametrizedFieldMapper { private final int shingleSize; private final boolean indexPrefixes; - private SearchAsYouTypeAnalyzer(Analyzer delegate, - int shingleSize, - boolean indexPrefixes) { + private SearchAsYouTypeAnalyzer(Analyzer delegate, int shingleSize, boolean indexPrefixes) { super(delegate.getReuseStrategy()); this.delegate = Objects.requireNonNull(delegate); diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java index 77aaf223f51..ed71ffa5158 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/TokenCountFieldMapper.java @@ -62,13 +62,20 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper { private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); private final Parameter store = Parameter.storeParam(m -> toType(m).store, false); - private final Parameter analyzer - = Parameter.analyzerParam("analyzer", true, m -> toType(m).analyzer, () -> null); + private final Parameter analyzer = Parameter.analyzerParam("analyzer", true, m -> toType(m).analyzer, () -> null); private final Parameter nullValue = new Parameter<>( - "null_value", false, () -> null, - (n, c, o) -> o == null ? 
null : nodeIntegerValue(o), m -> toType(m).nullValue).acceptsNull(); - private final Parameter enablePositionIncrements - = Parameter.boolParam("enable_position_increments", false, m -> toType(m).enablePositionIncrements, true); + "null_value", + false, + () -> null, + (n, c, o) -> o == null ? null : nodeIntegerValue(o), + m -> toType(m).nullValue + ).acceptsNull(); + private final Parameter enablePositionIncrements = Parameter.boolParam( + "enable_position_increments", + false, + m -> toType(m).enablePositionIncrements, + true + ); private final Parameter> meta = Parameter.metaParam(); @@ -92,15 +99,22 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper { store.getValue(), hasDocValues.getValue(), nullValue.getValue(), - meta.getValue()); + meta.getValue() + ); return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this); } } static class TokenCountFieldType extends NumberFieldMapper.NumberFieldType { - TokenCountFieldType(String name, boolean isSearchable, boolean isStored, - boolean hasDocValues, Number nullValue, Map meta) { + TokenCountFieldType( + String name, + boolean isSearchable, + boolean isStored, + boolean hasDocValues, + Number nullValue, + Map meta + ) { super(name, NumberFieldMapper.NumberType.INTEGER, isSearchable, isStored, hasDocValues, false, nullValue, meta); } @@ -122,8 +136,13 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper { private final boolean enablePositionIncrements; private final Integer nullValue; - protected TokenCountFieldMapper(String simpleName, MappedFieldType defaultFieldType, - MultiFields multiFields, CopyTo copyTo, Builder builder) { + protected TokenCountFieldMapper( + String simpleName, + MappedFieldType defaultFieldType, + MultiFields multiFields, + CopyTo copyTo, + Builder builder + ) { super(simpleName, defaultFieldType, multiFields, copyTo); this.analyzer = builder.analyzer.getValue(); this.enablePositionIncrements = builder.enablePositionIncrements.getValue(); @@ -153,9 +172,7 @@ public class TokenCountFieldMapper extends ParametrizedFieldMapper { tokenCount = countPositions(analyzer, name(), value, enablePositionIncrements); } - context.doc().addAll( - NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, index, hasDocValues, store) - ); + context.doc().addAll(NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, index, hasDocValues, store)); } /** diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/query/RankFeatureQueryBuilder.java b/modules/mapper-extras/src/main/java/org/opensearch/index/query/RankFeatureQueryBuilder.java index be38eeb0065..9ad52c805da 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/query/RankFeatureQueryBuilder.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/query/RankFeatureQueryBuilder.java @@ -75,7 +75,9 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( - "log", a -> new Log((Float) a[0])); + "log", + a -> new Log((Float) a[0]) + ); static { PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("scaling_factor")); } @@ -120,8 +122,10 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( - "saturation", a -> new Saturation((Float) a[0])); + "saturation", + a -> new Saturation((Float) a[0]) + ); static { PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), 
new ParseField("pivot")); } @@ -205,7 +211,9 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( - "sigmoid", a -> new Sigmoid((Float) a[0], ((Float) a[1]).floatValue())); + "sigmoid", + a -> new Sigmoid((Float) a[0], ((Float) a[1]).floatValue()) + ); static { PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("pivot")); PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("exponent")); @@ -229,8 +237,7 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder PARSER = new ObjectParser<>("linear", Linear::new); - public Linear() { - } + public Linear() {} private Linear(StreamInput in) { this(); @@ -308,53 +314,49 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( - "feature", args -> { - final String field = (String) args[0]; - final float boost = args[1] == null ? DEFAULT_BOOST : (Float) args[1]; - final String queryName = (String) args[2]; - long numNonNulls = Arrays.stream(args, 3, args.length).filter(Objects::nonNull).count(); - final RankFeatureQueryBuilder query; - if (numNonNulls > 1) { - throw new IllegalArgumentException("Can only specify one of [log], [saturation], [sigmoid] and [linear]"); - } else if (numNonNulls == 0) { - query = new RankFeatureQueryBuilder(field, new ScoreFunction.Saturation()); - } else { - ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length) - .filter(Objects::nonNull) - .findAny() - .get(); - query = new RankFeatureQueryBuilder(field, scoreFunction); - } - query.boost(boost); - query.queryName(queryName); - return query; - }); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("feature", args -> { + final String field = (String) args[0]; + final float boost = args[1] == null ? 
DEFAULT_BOOST : (Float) args[1]; + final String queryName = (String) args[2]; + long numNonNulls = Arrays.stream(args, 3, args.length).filter(Objects::nonNull).count(); + final RankFeatureQueryBuilder query; + if (numNonNulls > 1) { + throw new IllegalArgumentException("Can only specify one of [log], [saturation], [sigmoid] and [linear]"); + } else if (numNonNulls == 0) { + query = new RankFeatureQueryBuilder(field, new ScoreFunction.Saturation()); + } else { + ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length).filter(Objects::nonNull).findAny().get(); + query = new RankFeatureQueryBuilder(field, scoreFunction); + } + query.boost(boost); + query.queryName(queryName); + return query; + }); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field")); PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), BOOST_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NAME_FIELD); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), - ScoreFunction.Log.PARSER, new ParseField("log")); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), - ScoreFunction.Saturation.PARSER, new ParseField("saturation")); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), - ScoreFunction.Sigmoid.PARSER, new ParseField("sigmoid")); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), - ScoreFunction.Linear.PARSER, new ParseField("linear")); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ScoreFunction.Log.PARSER, new ParseField("log")); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + ScoreFunction.Saturation.PARSER, + new ParseField("saturation") + ); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ScoreFunction.Sigmoid.PARSER, new ParseField("sigmoid")); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ScoreFunction.Linear.PARSER, new ParseField("linear")); } public static final String NAME = "rank_feature"; @@ -416,8 +418,12 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder mapper.parse(source(b -> b.field("field", Arrays.asList(10, 20)))) ); - assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [field] in the same document", - e.getCause().getMessage()); + assertEquals( + "[rank_feature] fields do not support indexing multiple values for the same field [field] in the same document", + e.getCause().getMessage() + ); e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> { b.startArray("foo"); @@ -151,7 +153,9 @@ public class RankFeatureFieldMapperTests extends MapperTestCase { } b.endArray(); }))); - assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document", - e.getCause().getMessage()); + assertEquals( + "[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document", + e.getCause().getMessage() + ); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java index b523511d3a8..46e71096ba3 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java +++ 
b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java @@ -61,9 +61,18 @@ public class RankFeatureMetaFieldMapperTests extends OpenSearchSingleNodeTestCas } public void testBasics() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject() - .endObject().endObject()); + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "rank_feature") + .endObject() + .endObject() + .endObject() + .endObject() + ); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -80,9 +89,12 @@ public class RankFeatureMetaFieldMapperTests extends OpenSearchSingleNodeTestCas DocumentMapper mapper = parser.parse("_doc", new CompressedXContent(mapping)); String rfMetaField = RankFeatureMetaFieldMapper.CONTENT_TYPE; BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject()); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> - mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON))); + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)) + ); assertTrue( - e.getCause().getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document.")); + e.getCause().getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document.") + ); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java index 15ae3b4ad93..129ba6b1262 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java @@ -109,8 +109,10 @@ public class RankFeaturesFieldMapperTests extends MapperTestCase { MapperParsingException.class, () -> mapper.parse(source(b -> b.startObject("field").field("foo", Arrays.asList(10, 20)).endObject())) ); - assertEquals("[rank_features] fields take hashes that map a feature to a strictly positive float, but got unexpected token " + - "START_ARRAY", e.getCause().getMessage()); + assertEquals( + "[rank_features] fields take hashes that map a feature to a strictly positive float, but got unexpected token " + "START_ARRAY", + e.getCause().getMessage() + ); e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> { b.startArray("foo"); @@ -120,7 +122,10 @@ public class RankFeaturesFieldMapperTests extends MapperTestCase { } b.endArray(); }))); - assertEquals("[rank_features] fields do not support indexing multiple values for the same rank feature [foo.field.bar] in " + - "the same document", e.getCause().getMessage()); + assertEquals( + "[rank_features] fields do not support indexing multiple values for the same rank feature [foo.field.bar] in " + + "the same document", + e.getCause().getMessage() + ); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java 
b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java index 2888c04fb5a..b3db286d39d 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -68,21 +68,19 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { @Override protected void registerParameters(ParameterChecker checker) throws IOException { - checker.registerConflictCheck( - "scaling_factor", - fieldMapping(this::minimalMapping), - fieldMapping(b -> { - b.field("type", "scaled_float"); - b.field("scaling_factor", 5.0); - })); + checker.registerConflictCheck("scaling_factor", fieldMapping(this::minimalMapping), fieldMapping(b -> { + b.field("type", "scaled_float"); + b.field("scaling_factor", 5.0); + })); checker.registerConflictCheck("doc_values", b -> b.field("doc_values", false)); checker.registerConflictCheck("index", b -> b.field("index", false)); checker.registerConflictCheck("store", b -> b.field("store", true)); checker.registerConflictCheck("null_value", b -> b.field("null_value", 1)); - checker.registerUpdateCheck(b -> b.field("coerce", false), - m -> assertFalse(((ScaledFloatFieldMapper) m).coerce())); - checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), - m -> assertTrue(((ScaledFloatFieldMapper) m).ignoreMalformed())); + checker.registerUpdateCheck(b -> b.field("coerce", false), m -> assertFalse(((ScaledFloatFieldMapper) m).coerce())); + checker.registerUpdateCheck( + b -> b.field("ignore_malformed", true), + m -> assertTrue(((ScaledFloatFieldMapper) m).ignoreMalformed()) + ); } public void testExistsQueryDocValuesDisabled() throws IOException { @@ -133,12 +131,15 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { fieldMapping(b -> b.field("type", "scaled_float").field("index", false).field("scaling_factor", 10.0)) ); - ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -152,12 +153,15 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { fieldMapping(b -> b.field("type", "scaled_float").field("doc_values", false).field("scaling_factor", 10.0)) ); - ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -171,12 +175,15 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { fieldMapping(b -> b.field("type", "scaled_float").field("store", true).field("scaling_factor", 10.0)) ); - ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - 
.field("field", 123) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(3, fields.length); @@ -192,12 +199,15 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { public void testCoerce() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); IndexableField pointField = fields[0]; @@ -209,12 +219,15 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { DocumentMapper mapper2 = createDocumentMapper( fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("coerce", false)) ); - ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject()), - XContentType.JSON)); + ThrowingRunnable runnable = () -> mapper2.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), + XContentType.JSON + ) + ); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("passed as String")); } @@ -230,24 +243,30 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", value) - .endObject()), - XContentType.JSON)); + ThrowingRunnable runnable = () -> mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), + XContentType.JSON + ) + ); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains)); DocumentMapper mapper2 = createDocumentMapper( fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("ignore_malformed", true)) ); - ParsedDocument doc = mapper2.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", value) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper2.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, 
fields.length); @@ -255,23 +274,29 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { public void testNullValue() throws IOException { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), + XContentType.JSON + ) + ); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "scaled_float") - .field("scaling_factor", 10.0) - .field("null_value", 2.5))); - doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject()), - XContentType.JSON)); + mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("null_value", 2.5)) + ); + doc = mapper.parse( + new SourceToParse( + "test", + "_doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), + XContentType.JSON + ) + ); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); IndexableField pointField = fields[0]; @@ -291,8 +316,7 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase { MapperParsingException.class, () -> createMapperService(fieldMapping(b -> b.field("type", "scaled_float").field("index_options", randomIndexOptions()))) ); - assertThat(e.getMessage(), - containsString("Failed to parse mapping [_doc]: Field [scaling_factor] is required")); + assertThat(e.getMessage(), containsString("Failed to parse mapping [_doc]: Field [scaling_factor] is required")); assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future"); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java index af480240fa1..0baf3db32a4 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -57,23 +57,25 @@ import java.util.Collections; public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 0.1 + randomDouble() * 100); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( + "scaled_float", + 0.1 + randomDouble() * 100 + ); double value = (randomDouble() * 2 - 1) * 10000; long scaledValue = Math.round(value * ft.getScalingFactor()); assertEquals(LongPoint.newExactQuery("scaled_float", scaledValue), ft.termQuery(value, null)); } public void testTermsQuery() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 0.1 + randomDouble() * 100); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( + "scaled_float", + 0.1 + 
randomDouble() * 100 + ); double value1 = (randomDouble() * 2 - 1) * 10000; long scaledValue1 = Math.round(value1 * ft.getScalingFactor()); double value2 = (randomDouble() * 2 - 1) * 10000; long scaledValue2 = Math.round(value2 * ft.getScalingFactor()); - assertEquals( - LongPoint.newSetQuery("scaled_float", scaledValue1, scaledValue2), - ft.termsQuery(Arrays.asList(value1, value2), null)); + assertEquals(LongPoint.newSetQuery("scaled_float", scaledValue1, scaledValue2), ft.termsQuery(Arrays.asList(value1, value2), null)); } public void testRangeQuery() throws IOException { @@ -81,7 +83,14 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { // this test checks that searching scaled floats yields the same results as // searching doubles that are rounded to the closest half float ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( - "scaled_float", true, false, false, Collections.emptyMap(), 0.1 + randomDouble() * 100, null); + "scaled_float", + true, + false, + false, + Collections.emptyMap(), + 0.1 + randomDouble() * 100, + null + ); Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); final int numDocs = 1000; @@ -111,8 +120,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { } public void testRoundsUpperBoundCorrectly() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_QSC); assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_QSC); @@ -130,8 +138,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { } public void testRoundsLowerBoundCorrectly() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_QSC); assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_QSC); @@ -147,10 +154,12 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { } public void testValueForSearch() { - ScaledFloatFieldMapper.ScaledFloatFieldType ft - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 0.1 + randomDouble() * 100); + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType( + "scaled_float", + 0.1 + randomDouble() * 100 + ); assertNull(ft.valueForDisplay(null)); - assertEquals(10/ft.getScalingFactor(), ft.valueForDisplay(10L)); + assertEquals(10 / ft.getScalingFactor(), ft.valueForDisplay(10L)); } public void testFieldData() throws IOException { @@ -164,30 +173,34 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { w.addDocument(doc); try (DirectoryReader reader = DirectoryReader.open(w)) { // single-valued - ScaledFloatFieldMapper.ScaledFloatFieldType f1 - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float1", scalingFactor); - IndexNumericFieldData fielddata = (IndexNumericFieldData) f1.fielddataBuilder("index", () -> { - throw new UnsupportedOperationException(); - 
}).build(null, null); + ScaledFloatFieldMapper.ScaledFloatFieldType f1 = new ScaledFloatFieldMapper.ScaledFloatFieldType( + "scaled_float1", + scalingFactor + ); + IndexNumericFieldData fielddata = (IndexNumericFieldData) f1.fielddataBuilder( + "index", + () -> { throw new UnsupportedOperationException(); } + ).build(null, null); assertEquals(fielddata.getNumericType(), IndexNumericFieldData.NumericType.DOUBLE); LeafNumericFieldData leafFieldData = fielddata.load(reader.leaves().get(0)); SortedNumericDoubleValues values = leafFieldData.getDoubleValues(); assertTrue(values.advanceExact(0)); assertEquals(1, values.docValueCount()); - assertEquals(10/f1.getScalingFactor(), values.nextValue(), 10e-5); + assertEquals(10 / f1.getScalingFactor(), values.nextValue(), 10e-5); // multi-valued - ScaledFloatFieldMapper.ScaledFloatFieldType f2 - = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float2", scalingFactor); - fielddata = (IndexNumericFieldData) f2.fielddataBuilder("index", () -> { - throw new UnsupportedOperationException(); - }).build(null, null); + ScaledFloatFieldMapper.ScaledFloatFieldType f2 = new ScaledFloatFieldMapper.ScaledFloatFieldType( + "scaled_float2", + scalingFactor + ); + fielddata = (IndexNumericFieldData) f2.fielddataBuilder("index", () -> { throw new UnsupportedOperationException(); }) + .build(null, null); leafFieldData = fielddata.load(reader.leaves().get(0)); values = leafFieldData.getDoubleValues(); assertTrue(values.advanceExact(0)); assertEquals(2, values.docValueCount()); - assertEquals(5/f2.getScalingFactor(), values.nextValue(), 10e-5); - assertEquals(12/f2.getScalingFactor(), values.nextValue(), 10e-5); + assertEquals(5 / f2.getScalingFactor(), values.nextValue(), 10e-5); + assertEquals(12 / f2.getScalingFactor(), values.nextValue(), 10e-5); } IOUtils.close(w, dir); } @@ -196,16 +209,12 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath()); - MappedFieldType mapper = new ScaledFloatFieldMapper.Builder("field", false, false) - .scalingFactor(100) - .build(context) - .fieldType(); + MappedFieldType mapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100).build(context).fieldType(); assertEquals(Collections.singletonList(3.14), fetchSourceValue(mapper, 3.1415926)); assertEquals(Collections.singletonList(3.14), fetchSourceValue(mapper, "3.1415")); assertEquals(Collections.emptyList(), fetchSourceValue(mapper, "")); - MappedFieldType nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false) - .scalingFactor(100) + MappedFieldType nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false).scalingFactor(100) .nullValue(2.71) .build(context) .fieldType(); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeAnalyzerTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeAnalyzerTests.java index d2e04a212dd..053c12ad666 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeAnalyzerTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeAnalyzerTests.java @@ -54,7 +54,7 @@ public class SearchAsYouTypeAnalyzerTests extends OpenSearchTestCase { private static final Analyzer SIMPLE = new SimpleAnalyzer(); - public static List 
analyze(SearchAsYouTypeAnalyzer analyzer, String text) throws IOException { + public static List analyze(SearchAsYouTypeAnalyzer analyzer, String text) throws IOException { final List tokens = new ArrayList<>(); try (TokenStream tokenStream = analyzer.tokenStream("field", text)) { final CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class); @@ -66,9 +66,11 @@ public class SearchAsYouTypeAnalyzerTests extends OpenSearchTestCase { return tokens; } - private void testCase(String text, - Function analyzerFunction, - Function> expectedTokensFunction) throws IOException { + private void testCase( + String text, + Function analyzerFunction, + Function> expectedTokensFunction + ) throws IOException { for (int shingleSize = 2; shingleSize <= 4; shingleSize++) { final SearchAsYouTypeAnalyzer analyzer = analyzerFunction.apply(shingleSize); @@ -79,114 +81,178 @@ public class SearchAsYouTypeAnalyzerTests extends OpenSearchTestCase { } public void testSingleTermShingles() throws IOException { - testCase( - "quick", - shingleSize -> SearchAsYouTypeAnalyzer.withShingle(SIMPLE, shingleSize), - shingleSize -> emptyList() - ); + testCase("quick", shingleSize -> SearchAsYouTypeAnalyzer.withShingle(SIMPLE, shingleSize), shingleSize -> emptyList()); } public void testMultiTermShingles() throws IOException { - testCase( - "quick brown fox jump lazy", - shingleSize -> SearchAsYouTypeAnalyzer.withShingle(SIMPLE, shingleSize), - shingleSize -> { - if (shingleSize == 2) { - return asList("quick brown", "brown fox", "fox jump", "jump lazy"); - } else if (shingleSize == 3) { - return asList("quick brown fox", "brown fox jump", "fox jump lazy"); - } else if (shingleSize == 4) { - return asList("quick brown fox jump", "brown fox jump lazy"); - } - throw new IllegalArgumentException(); + testCase("quick brown fox jump lazy", shingleSize -> SearchAsYouTypeAnalyzer.withShingle(SIMPLE, shingleSize), shingleSize -> { + if (shingleSize == 2) { + return asList("quick brown", "brown fox", "fox jump", "jump lazy"); + } else if (shingleSize == 3) { + return asList("quick brown fox", "brown fox jump", "fox jump lazy"); + } else if (shingleSize == 4) { + return asList("quick brown fox jump", "brown fox jump lazy"); } - ); + throw new IllegalArgumentException(); + }); } public void testSingleTermPrefix() throws IOException { - testCase( - "quick", - shingleSize -> SearchAsYouTypeAnalyzer.withShingleAndPrefix(SIMPLE, shingleSize), - shingleSize -> { - final List tokens = new ArrayList<>(asList("q", "qu", "qui", "quic", "quick")); - tokens.addAll(tokenWithSpaces("quick", shingleSize)); - return tokens; - } - ); + testCase("quick", shingleSize -> SearchAsYouTypeAnalyzer.withShingleAndPrefix(SIMPLE, shingleSize), shingleSize -> { + final List tokens = new ArrayList<>(asList("q", "qu", "qui", "quic", "quick")); + tokens.addAll(tokenWithSpaces("quick", shingleSize)); + return tokens; + }); } public void testMultiTermPrefix() throws IOException { testCase( - //"quick red fox lazy brown", + // "quick red fox lazy brown", "quick brown fox jump lazy", shingleSize -> SearchAsYouTypeAnalyzer.withShingleAndPrefix(SIMPLE, shingleSize), shingleSize -> { if (shingleSize == 2) { final List tokens = new ArrayList<>(); - tokens.addAll(asList( - "q", "qu", "qui", "quic", "quick", "quick ", "quick b", "quick br", "quick bro", "quick brow", "quick brown" - )); - tokens.addAll(asList( - "b", "br", "bro", "brow", "brown", "brown ", "brown f", "brown fo", "brown fox" - )); - tokens.addAll(asList( - "f", "fo", "fox", "fox ", 
"fox j", "fox ju", "fox jum", "fox jump" - )); - tokens.addAll(asList( - "j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy" - )); - tokens.addAll(asList( - "l", "la", "laz", "lazy" - )); + tokens.addAll( + asList("q", "qu", "qui", "quic", "quick", "quick ", "quick b", "quick br", "quick bro", "quick brow", "quick brown") + ); + tokens.addAll(asList("b", "br", "bro", "brow", "brown", "brown ", "brown f", "brown fo", "brown fox")); + tokens.addAll(asList("f", "fo", "fox", "fox ", "fox j", "fox ju", "fox jum", "fox jump")); + tokens.addAll(asList("j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy")); + tokens.addAll(asList("l", "la", "laz", "lazy")); tokens.addAll(tokenWithSpaces("lazy", shingleSize)); return tokens; } else if (shingleSize == 3) { final List tokens = new ArrayList<>(); - tokens.addAll(asList( - "q", "qu", "qui", "quic", "quick", "quick ", "quick b", "quick br", "quick bro", "quick brow", "quick brown", - "quick brown ", "quick brown f", "quick brown fo", "quick brown fox" - )); - tokens.addAll(asList( - "b", "br", "bro", "brow", "brown", "brown ", "brown f", "brown fo", "brown fox", "brown fox ", "brown fox j", - "brown fox ju", "brown fox jum", "brown fox jump" - )); - tokens.addAll(asList( - "f", "fo", "fox", "fox ", "fox j", "fox ju", "fox jum", "fox jump", "fox jump ", "fox jump l", "fox jump la", - "fox jump laz", "fox jump lazy" - )); - tokens.addAll(asList( - "j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy" - )); + tokens.addAll( + asList( + "q", + "qu", + "qui", + "quic", + "quick", + "quick ", + "quick b", + "quick br", + "quick bro", + "quick brow", + "quick brown", + "quick brown ", + "quick brown f", + "quick brown fo", + "quick brown fox" + ) + ); + tokens.addAll( + asList( + "b", + "br", + "bro", + "brow", + "brown", + "brown ", + "brown f", + "brown fo", + "brown fox", + "brown fox ", + "brown fox j", + "brown fox ju", + "brown fox jum", + "brown fox jump" + ) + ); + tokens.addAll( + asList( + "f", + "fo", + "fox", + "fox ", + "fox j", + "fox ju", + "fox jum", + "fox jump", + "fox jump ", + "fox jump l", + "fox jump la", + "fox jump laz", + "fox jump lazy" + ) + ); + tokens.addAll(asList("j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy")); tokens.addAll(tokenWithSpaces("jump lazy", shingleSize - 1)); - tokens.addAll(asList( - "l", "la", "laz", "lazy" - )); + tokens.addAll(asList("l", "la", "laz", "lazy")); tokens.addAll(tokenWithSpaces("lazy", shingleSize)); return tokens; } else if (shingleSize == 4) { final List tokens = new ArrayList<>(); - tokens.addAll(asList( - "q", "qu", "qui", "quic", "quick", "quick ", "quick b", "quick br", "quick bro", "quick brow", "quick brown", - "quick brown ", "quick brown f", "quick brown fo", "quick brown fox", "quick brown fox ", "quick brown fox j", - "quick brown fox ju", "quick brown fox jum", "quick brown fox jump" - )); - tokens.addAll(asList( - "b", "br", "bro", "brow", "brown", "brown ", "brown f", "brown fo", "brown fox", "brown fox ", "brown fox j", - "brown fox ju", "brown fox jum", "brown fox jump", "brown fox jump ", "brown fox jump l", "brown fox jump la", - "brown fox jump laz", "brown fox jump lazy" - )); - tokens.addAll(asList( - "f", "fo", "fox", "fox ", "fox j", "fox ju", "fox jum", "fox jump", "fox jump ", "fox jump l", "fox jump la", - "fox jump laz", "fox jump lazy" - )); + tokens.addAll( + asList( + "q", + "qu", + "qui", + "quic", + "quick", + "quick ", + "quick b", + "quick br", + "quick 
bro", + "quick brow", + "quick brown", + "quick brown ", + "quick brown f", + "quick brown fo", + "quick brown fox", + "quick brown fox ", + "quick brown fox j", + "quick brown fox ju", + "quick brown fox jum", + "quick brown fox jump" + ) + ); + tokens.addAll( + asList( + "b", + "br", + "bro", + "brow", + "brown", + "brown ", + "brown f", + "brown fo", + "brown fox", + "brown fox ", + "brown fox j", + "brown fox ju", + "brown fox jum", + "brown fox jump", + "brown fox jump ", + "brown fox jump l", + "brown fox jump la", + "brown fox jump laz", + "brown fox jump lazy" + ) + ); + tokens.addAll( + asList( + "f", + "fo", + "fox", + "fox ", + "fox j", + "fox ju", + "fox jum", + "fox jump", + "fox jump ", + "fox jump l", + "fox jump la", + "fox jump laz", + "fox jump lazy" + ) + ); tokens.addAll(tokenWithSpaces("fox jump lazy", shingleSize - 2)); - tokens.addAll(asList( - "j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy" - )); + tokens.addAll(asList("j", "ju", "jum", "jump", "jump ", "jump l", "jump la", "jump laz", "jump lazy")); tokens.addAll(tokenWithSpaces("jump lazy", shingleSize - 1)); - tokens.addAll(asList( - "l", "la", "laz", "lazy" - )); + tokens.addAll(asList("l", "la", "laz", "lazy")); tokens.addAll(tokenWithSpaces("lazy", shingleSize)); return tokens; } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java index 0730faa5e56..037b486df95 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java @@ -100,38 +100,30 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { checker.registerConflictCheck("term_vector", b -> b.field("term_vector", "yes")); // norms can be set from true to false, but not vice versa - checker.registerConflictCheck("norms", - fieldMapping(b -> { - b.field("type", "text"); - b.field("norms", false); - }), - fieldMapping(b -> { - b.field("type", "text"); - b.field("norms", true); - })); - checker.registerUpdateCheck( - b -> { - b.field("type", "search_as_you_type"); - b.field("norms", true); - }, - b -> { - b.field("type", "search_as_you_type"); - b.field("norms", false); - }, - m -> assertFalse(m.fieldType().getTextSearchInfo().hasNorms()) - ); + checker.registerConflictCheck("norms", fieldMapping(b -> { + b.field("type", "text"); + b.field("norms", false); + }), fieldMapping(b -> { + b.field("type", "text"); + b.field("norms", true); + })); + checker.registerUpdateCheck(b -> { + b.field("type", "search_as_you_type"); + b.field("norms", true); + }, b -> { + b.field("type", "search_as_you_type"); + b.field("norms", false); + }, m -> assertFalse(m.fieldType().getTextSearchInfo().hasNorms())); checker.registerUpdateCheck(b -> { - b.field("analyzer", "default"); - b.field("search_analyzer", "keyword"); - }, - m -> assertEquals("keyword", m.fieldType().getTextSearchInfo().getSearchAnalyzer().name())); + b.field("analyzer", "default"); + b.field("search_analyzer", "keyword"); + }, m -> assertEquals("keyword", m.fieldType().getTextSearchInfo().getSearchAnalyzer().name())); checker.registerUpdateCheck(b -> { - b.field("analyzer", "default"); - b.field("search_analyzer", "keyword"); - b.field("search_quote_analyzer", "keyword"); - }, - m -> assertEquals("keyword", 
m.fieldType().getTextSearchInfo().getSearchQuoteAnalyzer().name())); + b.field("analyzer", "default"); + b.field("search_analyzer", "keyword"); + b.field("search_quote_analyzer", "keyword"); + }, m -> assertEquals("keyword", m.fieldType().getTextSearchInfo().getSearchQuoteAnalyzer().name())); } @@ -158,7 +150,16 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { NamedAnalyzer whitespace = new NamedAnalyzer("whitespace", AnalyzerScope.INDEX, new WhitespaceAnalyzer()); return new IndexAnalyzers( org.opensearch.common.collect.Map.of( - "default", dflt, "standard", standard, "keyword", keyword, "simple", simple, "whitespace", whitespace + "default", + dflt, + "standard", + standard, + "keyword", + keyword, + "simple", + simple, + "whitespace", + whitespace ), org.opensearch.common.collect.Map.of(), org.opensearch.common.collect.Map.of() @@ -179,14 +180,14 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { public void testIndexing() throws IOException { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "new york city"))); - for (String field : new String[] { "field", "field._index_prefix", "field._2gram", "field._3gram"}) { + for (String field : new String[] { "field", "field._index_prefix", "field._2gram", "field._3gram" }) { IndexableField[] fields = doc.rootDoc().getFields(field); assertEquals(1, fields.length); assertEquals("new york city", fields[0].stringValue()); } } - public void testDefaultConfiguration() throws IOException { + public void testDefaultConfiguration() throws IOException { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); SearchAsYouTypeFieldMapper rootMapper = getRootFieldMapper(defaultMapper, "field"); assertRootFieldMapper(rootMapper, 3, "default"); @@ -195,10 +196,18 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertPrefixFieldType(prefixFieldMapper.fieldType(), 3, "default"); assertShingleFieldType( - getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), 2, "default", prefixFieldMapper.fieldType()); + getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), + 2, + "default", + prefixFieldMapper.fieldType() + ); assertShingleFieldType( - getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), 3, "default", prefixFieldMapper.fieldType()); - } + getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), + 3, + "default", + prefixFieldMapper.fieldType() + ); + } public void testConfiguration() throws IOException { int maxShingleSize = 4; @@ -216,11 +225,23 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertPrefixFieldType(prefixFieldMapper.fieldType(), maxShingleSize, analyzerName); assertShingleFieldType( - getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), 2, analyzerName, prefixFieldMapper.fieldType()); + getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), + 2, + analyzerName, + prefixFieldMapper.fieldType() + ); assertShingleFieldType( - getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), 3, analyzerName, prefixFieldMapper.fieldType()); + getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), + 3, + analyzerName, + prefixFieldMapper.fieldType() + ); assertShingleFieldType( - getShingleFieldMapper(defaultMapper, "field._4gram").fieldType(), 4, analyzerName, prefixFieldMapper.fieldType()); + getShingleFieldMapper(defaultMapper, 
"field._4gram").fieldType(), + 4, + analyzerName, + prefixFieldMapper.fieldType() + ); } public void testSimpleMerge() throws IOException { @@ -289,15 +310,23 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { fieldMapping(b -> b.field("type", "search_as_you_type").field("index_options", "offsets")) ); - assertThat(getRootFieldMapper(mapper, "field").fieldType().fieldType.indexOptions(), - equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)); + assertThat( + getRootFieldMapper(mapper, "field").fieldType().fieldType.indexOptions(), + equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) + ); Stream.of( getPrefixFieldMapper(mapper, "field._index_prefix"), getShingleFieldMapper(mapper, "field._2gram"), getShingleFieldMapper(mapper, "field._3gram") - ).forEach(m -> assertThat("for " + m.name(), - m.fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS))); + ) + .forEach( + m -> assertThat( + "for " + m.name(), + m.fieldType.indexOptions(), + equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) + ) + ); } public void testStore() throws IOException { @@ -327,10 +356,8 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertTrue(getRootFieldMapper(mapper, "field").fieldType().fieldType.storeTermVectors()); - Stream.of( - getShingleFieldMapper(mapper, "field._2gram"), - getShingleFieldMapper(mapper, "field._3gram") - ).forEach(m -> assertTrue("for " + m.name(), m.fieldType.storeTermVectors())); + Stream.of(getShingleFieldMapper(mapper, "field._2gram"), getShingleFieldMapper(mapper, "field._3gram")) + .forEach(m -> assertTrue("for " + m.name(), m.fieldType.storeTermVectors())); PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(mapper, "field._index_prefix"); assertFalse(prefixFieldMapper.fieldType.storeTermVectors()); @@ -399,10 +426,11 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { { Query q = new MatchPhrasePrefixQueryBuilder("field", "more than three words").toQuery(queryShardContext); - Query expected = new SpanNearQuery.Builder("field._3gram", true) - .addClause(new SpanTermQuery(new Term("field._3gram", "more than three"))) - .addClause(new FieldMaskingSpanQuery( - new SpanTermQuery(new Term("field._index_prefix", "than three words")), "field._3gram") + Query expected = new SpanNearQuery.Builder("field._3gram", true).addClause( + new SpanTermQuery(new Term("field._3gram", "more than three")) + ) + .addClause( + new FieldMaskingSpanQuery(new SpanTermQuery(new Term("field._index_prefix", "than three words")), "field._3gram") ) .build(); assertThat(q, equalTo(expected)); @@ -410,10 +438,11 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { { Query q = new MatchPhrasePrefixQueryBuilder("field._3gram", "more than three words").toQuery(queryShardContext); - Query expected = new SpanNearQuery.Builder("field._3gram", true) - .addClause(new SpanTermQuery(new Term("field._3gram", "more than three"))) - .addClause(new FieldMaskingSpanQuery( - new SpanTermQuery(new Term("field._index_prefix", "than three words")), "field._3gram") + Query expected = new SpanNearQuery.Builder("field._3gram", true).addClause( + new SpanTermQuery(new Term("field._3gram", "more than three")) + ) + .addClause( + new FieldMaskingSpanQuery(new SpanTermQuery(new Term("field._index_prefix", "than three words")), "field._3gram") ) .build(); assertThat(q, equalTo(expected)); @@ -426,9 +455,7 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { } { - 
Query actual = new MatchPhrasePrefixQueryBuilder("field._3gram", "one two three four") - .slop(1) - .toQuery(queryShardContext); + Query actual = new MatchPhrasePrefixQueryBuilder("field._3gram", "one two three four").slop(1).toQuery(queryShardContext); MultiPhrasePrefixQuery expected = new MultiPhrasePrefixQuery("field._3gram"); expected.setSlop(1); expected.add(new Term("field._3gram", "one two three")); @@ -441,89 +468,70 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { public void testMatchPhrase() throws IOException { QueryShardContext queryShardContext = createQueryShardContext(createMapperService(fieldMapping(this::minimalMapping))); { - Query actual = new MatchPhraseQueryBuilder("field", "one") - .toQuery(queryShardContext); + Query actual = new MatchPhraseQueryBuilder("field", "one").toQuery(queryShardContext); Query expected = new TermQuery(new Term("field", "one")); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field", "one two") - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field._2gram", "one two")) - .build(); + Query actual = new MatchPhraseQueryBuilder("field", "one two").toQuery(queryShardContext); + Query expected = new MultiPhraseQuery.Builder().add(new Term("field._2gram", "one two")).build(); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field", "one two three") - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field._3gram", "one two three")) - .build(); + Query actual = new MatchPhraseQueryBuilder("field", "one two three").toQuery(queryShardContext); + Query expected = new MultiPhraseQuery.Builder().add(new Term("field._3gram", "one two three")).build(); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field", "one two three four") - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field._3gram", "one two three")) + Query actual = new MatchPhraseQueryBuilder("field", "one two three four").toQuery(queryShardContext); + Query expected = new MultiPhraseQuery.Builder().add(new Term("field._3gram", "one two three")) .add(new Term("field._3gram", "two three four")) .build(); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field", "one two") - .slop(1) - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field", "one")) - .add(new Term("field", "two")) - .setSlop(1) - .build(); + Query actual = new MatchPhraseQueryBuilder("field", "one two").slop(1).toQuery(queryShardContext); + Query expected = new MultiPhraseQuery.Builder().add(new Term("field", "one")).add(new Term("field", "two")).setSlop(1).build(); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two") - .toQuery(queryShardContext); + Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two").toQuery(queryShardContext); Query expected = new TermQuery(new Term("field._2gram", "one two")); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two three") - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field._2gram", "one two")) + Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two three").toQuery(queryShardContext); + Query expected = new 
MultiPhraseQuery.Builder().add(new Term("field._2gram", "one two")) .add(new Term("field._2gram", "two three")) .build(); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three") - .toQuery(queryShardContext); + Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three").toQuery(queryShardContext); Query expected = new TermQuery(new Term("field._3gram", "one two three")); assertThat(actual, equalTo(expected)); } { - Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three four") - .toQuery(queryShardContext); - Query expected = new MultiPhraseQuery.Builder() - .add(new Term("field._3gram", "one two three")) + Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three four").toQuery(queryShardContext); + Query expected = new MultiPhraseQuery.Builder().add(new Term("field._3gram", "one two three")) .add(new Term("field._3gram", "two three four")) .build(); assertThat(actual, equalTo(expected)); } { - expectThrows(IllegalArgumentException.class, - () -> new MatchPhraseQueryBuilder("field._index_prefix", "one two three four").toQuery(queryShardContext)); + expectThrows( + IllegalArgumentException.class, + () -> new MatchPhraseQueryBuilder("field._index_prefix", "one two three four").toQuery(queryShardContext) + ); } } @@ -534,8 +542,9 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { builder.add(new BooleanClause(new TermQuery(new Term(shingleFieldName, term)), BooleanClause.Occur.SHOULD)); } final String finalTerm = terms.get(terms.size() - 1); - builder.add(new BooleanClause( - new ConstantScoreQuery(new TermQuery(new Term(prefixFieldName, finalTerm))), BooleanClause.Occur.SHOULD)); + builder.add( + new BooleanClause(new ConstantScoreQuery(new TermQuery(new Term(prefixFieldName, finalTerm))), BooleanClause.Occur.SHOULD) + ); return builder.build(); } @@ -557,15 +566,27 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertThat(actual, instanceOf(DisjunctionMaxQuery.class)); final DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) actual; assertThat(disMaxQuery.getDisjuncts(), hasSize(4)); - assertThat(disMaxQuery.getDisjuncts(), containsInAnyOrder( - buildBoolPrefixQuery( - "field", "field._index_prefix", asList("quick", "brown", "fox", "jump", "lazy", "dog")), - buildBoolPrefixQuery("field._2gram", "field._index_prefix", - asList("quick brown", "brown fox", "fox jump", "jump lazy", "lazy dog")), - buildBoolPrefixQuery("field._3gram", "field._index_prefix", - asList("quick brown fox", "brown fox jump", "fox jump lazy", "jump lazy dog")), - buildBoolPrefixQuery("field._4gram", "field._index_prefix", - asList("quick brown fox jump", "brown fox jump lazy", "fox jump lazy dog")))); + assertThat( + disMaxQuery.getDisjuncts(), + containsInAnyOrder( + buildBoolPrefixQuery("field", "field._index_prefix", asList("quick", "brown", "fox", "jump", "lazy", "dog")), + buildBoolPrefixQuery( + "field._2gram", + "field._index_prefix", + asList("quick brown", "brown fox", "fox jump", "jump lazy", "lazy dog") + ), + buildBoolPrefixQuery( + "field._3gram", + "field._index_prefix", + asList("quick brown fox", "brown fox jump", "fox jump lazy", "jump lazy dog") + ), + buildBoolPrefixQuery( + "field._4gram", + "field._index_prefix", + asList("quick brown fox jump", "brown fox jump lazy", "fox jump lazy dog") + ) + ) + ); } public void testAnalyzerSerialization() throws IOException { @@ -574,9 +595,11 @@ public class SearchAsYouTypeFieldMapperTests 
extends MapperTestCase { b.field("analyzer", "simple"); })); String serialized = Strings.toString(ms.documentMapper()); - assertEquals(serialized, - "{\"_doc\":{\"properties\":{\"field\":" + - "{\"type\":\"search_as_you_type\",\"doc_values\":false,\"max_shingle_size\":3,\"analyzer\":\"simple\"}}}}"); + assertEquals( + serialized, + "{\"_doc\":{\"properties\":{\"field\":" + + "{\"type\":\"search_as_you_type\",\"doc_values\":false,\"max_shingle_size\":3,\"analyzer\":\"simple\"}}}}" + ); merge(ms, mapping(b -> {})); assertEquals(serialized, Strings.toString(ms.documentMapper())); @@ -596,15 +619,13 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { IndexableField[] prefixFields = parsedDocument.rootDoc().getFields("field._index_prefix"); IndexableField[] shingle2Fields = parsedDocument.rootDoc().getFields("field._2gram"); IndexableField[] shingle3Fields = parsedDocument.rootDoc().getFields("field._3gram"); - for (IndexableField[] fields : new IndexableField[][]{rootFields, prefixFields, shingle2Fields, shingle3Fields}) { + for (IndexableField[] fields : new IndexableField[][] { rootFields, prefixFields, shingle2Fields, shingle3Fields }) { Set expectedValues = Arrays.stream(fields).map(IndexableField::stringValue).collect(Collectors.toSet()); assertThat(values, equalTo(expectedValues)); } } - private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper, - int maxShingleSize, - String analyzerName) { + private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper, int maxShingleSize, String analyzerName) { assertThat(mapper.maxShingleSize(), equalTo(maxShingleSize)); assertThat(mapper.fieldType(), notNullValue()); @@ -614,7 +635,6 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertThat(mapper.prefixField().fieldType().parentField, equalTo(mapper.name())); assertPrefixFieldType(mapper.prefixField().fieldType(), maxShingleSize, analyzerName); - for (int shingleSize = 2; shingleSize <= maxShingleSize; shingleSize++) { final ShingleFieldMapper shingleFieldMapper = mapper.shingleFields()[shingleSize - 2]; assertThat(shingleFieldMapper, notNullValue()); @@ -625,9 +645,12 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertThat(mapper.shingleFields().length, equalTo(numberOfShingleSubfields)); } - private static void assertSearchAsYouTypeFieldType(SearchAsYouTypeFieldType fieldType, int maxShingleSize, - String analyzerName, - PrefixFieldType prefixFieldType) { + private static void assertSearchAsYouTypeFieldType( + SearchAsYouTypeFieldType fieldType, + int maxShingleSize, + String analyzerName, + PrefixFieldType prefixFieldType + ) { assertThat(fieldType.shingleFields.length, equalTo(maxShingleSize - 1)); for (NamedAnalyzer analyzer : asList(fieldType.indexAnalyzer(), fieldType.getTextSearchInfo().getSearchAnalyzer())) { @@ -641,10 +664,12 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { assertThat(fieldType.prefixField, equalTo(prefixFieldType)); } - private static void assertShingleFieldType(ShingleFieldType fieldType, - int shingleSize, - String analyzerName, - PrefixFieldType prefixFieldType) { + private static void assertShingleFieldType( + ShingleFieldType fieldType, + int shingleSize, + String analyzerName, + PrefixFieldType prefixFieldType + ) { assertThat(fieldType.shingleSize, equalTo(shingleSize)); @@ -667,8 +692,9 @@ public class SearchAsYouTypeFieldMapperTests extends MapperTestCase { } final SearchAsYouTypeAnalyzer wrappedIndexAnalyzer = 
(SearchAsYouTypeAnalyzer) fieldType.indexAnalyzer().analyzer(); - final SearchAsYouTypeAnalyzer wrappedSearchAnalyzer - = (SearchAsYouTypeAnalyzer) fieldType.getTextSearchInfo().getSearchAnalyzer().analyzer(); + final SearchAsYouTypeAnalyzer wrappedSearchAnalyzer = (SearchAsYouTypeAnalyzer) fieldType.getTextSearchInfo() + .getSearchAnalyzer() + .analyzer(); for (SearchAsYouTypeAnalyzer analyzer : asList(wrappedIndexAnalyzer, wrappedSearchAnalyzer)) { assertThat(analyzer.shingleSize(), equalTo(shingleSize)); } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldTypeTests.java index e295ea2a9b0..1998465318c 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldTypeTests.java @@ -70,12 +70,16 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { } private static SearchAsYouTypeFieldType createFieldType() { - final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType(NAME, SEARCHABLE, null, - Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap()); + final SearchAsYouTypeFieldType fieldType = new SearchAsYouTypeFieldType( + NAME, + SEARCHABLE, + null, + Lucene.STANDARD_ANALYZER, + Lucene.STANDARD_ANALYZER, + Collections.emptyMap() + ); fieldType.setPrefixField(new PrefixFieldType(NAME, TextSearchInfo.SIMPLE_MATCH_ONLY, Defaults.MIN_GRAM, Defaults.MAX_GRAM)); - fieldType.setShingleFields(new ShingleFieldType[] { - new ShingleFieldType(fieldType.name(), 2, TextSearchInfo.SIMPLE_MATCH_ONLY) - }); + fieldType.setShingleFields(new ShingleFieldType[] { new ShingleFieldType(fieldType.name(), 2, TextSearchInfo.SIMPLE_MATCH_ONLY) }); return fieldType; } @@ -84,8 +88,14 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { assertThat(fieldType.termQuery("foo", null), equalTo(new TermQuery(new Term(NAME, "foo")))); - SearchAsYouTypeFieldType unsearchable = new SearchAsYouTypeFieldType(NAME, UNSEARCHABLE, null, - Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap()); + SearchAsYouTypeFieldType unsearchable = new SearchAsYouTypeFieldType( + NAME, + UNSEARCHABLE, + null, + Lucene.STANDARD_ANALYZER, + Lucene.STANDARD_ANALYZER, + Collections.emptyMap() + ); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("foo", null)); assertThat(e.getMessage(), equalTo("Cannot search on field [" + NAME + "] since it is not indexed.")); } @@ -93,13 +103,23 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { public void testTermsQuery() { final MappedFieldType fieldType = createFieldType(); - assertThat(fieldType.termsQuery(asList("foo", "bar"), null), - equalTo(new TermInSetQuery(NAME, asList(new BytesRef("foo"), new BytesRef("bar"))))); + assertThat( + fieldType.termsQuery(asList("foo", "bar"), null), + equalTo(new TermInSetQuery(NAME, asList(new BytesRef("foo"), new BytesRef("bar")))) + ); - SearchAsYouTypeFieldType unsearchable = new SearchAsYouTypeFieldType(NAME, UNSEARCHABLE, null, - Lucene.STANDARD_ANALYZER, Lucene.STANDARD_ANALYZER, Collections.emptyMap()); - final IllegalArgumentException e = - expectThrows(IllegalArgumentException.class, () -> unsearchable.termsQuery(asList("foo", "bar"), null)); + SearchAsYouTypeFieldType unsearchable = new 
SearchAsYouTypeFieldType( + NAME, + UNSEARCHABLE, + null, + Lucene.STANDARD_ANALYZER, + Lucene.STANDARD_ANALYZER, + Collections.emptyMap() + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termsQuery(asList("foo", "bar"), null) + ); assertThat(e.getMessage(), equalTo("Cannot search on field [" + NAME + "] since it is not indexed.")); } @@ -108,20 +128,26 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { // this term should be a length that can be rewriteable to a term query on the prefix field final String withinBoundsTerm = "foo"; - assertThat(fieldType.prefixQuery(withinBoundsTerm, CONSTANT_SCORE_REWRITE, randomMockShardContext()), - equalTo(new ConstantScoreQuery(new TermQuery(new Term(NAME + "._index_prefix", withinBoundsTerm))))); + assertThat( + fieldType.prefixQuery(withinBoundsTerm, CONSTANT_SCORE_REWRITE, randomMockShardContext()), + equalTo(new ConstantScoreQuery(new TermQuery(new Term(NAME + "._index_prefix", withinBoundsTerm)))) + ); // our defaults don't allow a situation where a term can be too small // this term should be too long to be rewriteable to a term query on the prefix field final String longTerm = "toolongforourprefixfieldthistermis"; - assertThat(fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC), - equalTo(new PrefixQuery(new Term(NAME, longTerm)))); + assertThat(fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC), equalTo(new PrefixQuery(new Term(NAME, longTerm)))); - OpenSearchException ee = expectThrows(OpenSearchException.class, - () -> fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC_DISALLOW_EXPENSIVE)); - assertEquals("[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. " + - "For optimised prefix queries on text fields please enable [index_prefixes].", ee.getMessage()); + OpenSearchException ee = expectThrows( + OpenSearchException.class, + () -> fieldType.prefixQuery(longTerm, CONSTANT_SCORE_REWRITE, MOCK_QSC_DISALLOW_EXPENSIVE) + ); + assertEquals( + "[prefix] queries cannot be executed when 'search.allow_expensive_queries' is set to false. 
" + + "For optimised prefix queries on text fields please enable [index_prefixes].", + ee.getMessage() + ); } public void testFetchSourceValue() throws IOException { @@ -133,13 +159,20 @@ public class SearchAsYouTypeFieldTypeTests extends FieldTypeTestCase { assertEquals(org.opensearch.common.collect.List.of("true"), fetchSourceValue(fieldType, true)); SearchAsYouTypeFieldMapper.PrefixFieldType prefixFieldType = new SearchAsYouTypeFieldMapper.PrefixFieldType( - fieldType.name(), fieldType.getTextSearchInfo(), 2, 10); + fieldType.name(), + fieldType.getTextSearchInfo(), + 2, + 10 + ); assertEquals(org.opensearch.common.collect.List.of("value"), fetchSourceValue(prefixFieldType, "value")); assertEquals(org.opensearch.common.collect.List.of("42"), fetchSourceValue(prefixFieldType, 42L)); assertEquals(org.opensearch.common.collect.List.of("true"), fetchSourceValue(prefixFieldType, true)); SearchAsYouTypeFieldMapper.ShingleFieldType shingleFieldType = new SearchAsYouTypeFieldMapper.ShingleFieldType( - fieldType.name(), 5, fieldType.getTextSearchInfo()); + fieldType.name(), + 5, + fieldType.getTextSearchInfo() + ); assertEquals(org.opensearch.common.collect.List.of("value"), fetchSourceValue(shingleFieldType, "value")); assertEquals(org.opensearch.common.collect.List.of("42"), fetchSourceValue(shingleFieldType, 42L)); assertEquals(org.opensearch.common.collect.List.of("true"), fetchSourceValue(shingleFieldType, true)); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java index 7450abb0ec9..c3850b151dd 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java @@ -82,22 +82,15 @@ public class TokenCountFieldMapperTests extends MapperTestCase { checker.registerConflictCheck("doc_values", b -> b.field("doc_values", false)); checker.registerConflictCheck("null_value", b -> b.field("null_value", 1)); checker.registerConflictCheck("enable_position_increments", b -> b.field("enable_position_increments", false)); - checker.registerUpdateCheck( - this::minimalMapping, - b -> b.field("type", "token_count").field("analyzer", "standard"), - m -> { - TokenCountFieldMapper tcfm = (TokenCountFieldMapper) m; - assertThat(tcfm.analyzer(), equalTo("standard")); - }); + checker.registerUpdateCheck(this::minimalMapping, b -> b.field("type", "token_count").field("analyzer", "standard"), m -> { + TokenCountFieldMapper tcfm = (TokenCountFieldMapper) m; + assertThat(tcfm.analyzer(), equalTo("standard")); + }); } @Override protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) { - NamedAnalyzer dflt = new NamedAnalyzer( - "default", - AnalyzerScope.INDEX, - new StandardAnalyzer() - ); + NamedAnalyzer dflt = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()); NamedAnalyzer standard = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()); NamedAnalyzer keyword = new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()); Map analyzers = new HashMap<>(); @@ -144,7 +137,7 @@ public class TokenCountFieldMapperTests extends MapperTestCase { Token t3 = new Token(); t2.setPositionIncrement(2); // Funny token with more than one increment int finalTokenIncrement = 4; // Final token increment - Token[] tokens = new Token[] {t1, t2, t3}; + Token[] tokens 
= new Token[] { t1, t2, t3 }; Collections.shuffle(Arrays.asList(tokens), random()); final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens); // TODO: we have no CannedAnalyzer? @@ -199,7 +192,6 @@ public class TokenCountFieldMapperTests extends MapperTestCase { } private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) { - return mapper.parse(request) - .docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed")); + return mapper.parse(request).docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed")); } } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java index a25219b425d..40c4fd24b0b 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java @@ -58,10 +58,23 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase fields = new ArrayList<>(); fields.add("my_feature_field"); @@ -113,38 +126,33 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase parseQuery(query).toQuery(createShardContext())); - assertEquals("[rank_feature] query only works on [rank_feature] fields and features of [rank_features] fields, not [text]", - e.getMessage()); + assertEquals( + "[rank_feature] query only works on [rank_feature] fields and features of [rank_features] fields, not [text]", + e.getMessage() + ); } public void testIllegalCombination() throws IOException { - String query = "{\n" + - " \"rank_feature\" : {\n" + - " \"field\": \"my_negative_feature_field\",\n" + - " \"log\" : {\n" + - " \"scaling_factor\": 4.5\n" + - " }\n" + - " }\n" + - "}"; + String query = "{\n" + + " \"rank_feature\" : {\n" + + " \"field\": \"my_negative_feature_field\",\n" + + " \"log\" : {\n" + + " \"scaling_factor\": 4.5\n" + + " }\n" + + " }\n" + + "}"; IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(query).toQuery(createShardContext())); assertEquals( - "Cannot use the [log] function with a field that has a negative score impact as it would trigger negative scores", - e.getMessage()); + "Cannot use the [log] function with a field that has a negative score impact as it would trigger negative scores", + e.getMessage() + ); } } diff --git a/modules/mapper-extras/src/yamlRestTest/java/org/opensearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java b/modules/mapper-extras/src/yamlRestTest/java/org/opensearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java index 64ad86acfd8..4b4544283c6 100644 --- a/modules/mapper-extras/src/yamlRestTest/java/org/opensearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java +++ b/modules/mapper-extras/src/yamlRestTest/java/org/opensearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java @@ -50,4 +50,3 @@ public class MapperExtrasClientYamlTestSuiteIT extends OpenSearchClientYamlSuite return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java index 6ddc2058d8b..c2536897f35 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java @@ -55,10 +55,16 @@ public abstract class AbstractParentChildTestCase extends ParentChildTestCase { @Before public void setupCluster() throws Exception { assertAcked( - prepareCreate("test") - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment"), - "commenter", "keyword", "category", "keyword")) + prepareCreate("test").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment"), + "commenter", + "keyword", + "category", + "keyword" + ) + ) ); List requests = new ArrayList<>(); @@ -73,7 +79,7 @@ public abstract class AbstractParentChildTestCase extends ParentChildTestCase { String id = "article-" + i; // TODO: this array is always of length 1, and testChildrenAggs fails if this is changed - String[] categories = new String[randomIntBetween(1,1)]; + String[] categories = new String[randomIntBetween(1, 1)]; for (int j = 0; j < categories.length; j++) { String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length]; Control control = categoryToControl.computeIfAbsent(category, Control::new); @@ -103,17 +109,25 @@ public abstract class AbstractParentChildTestCase extends ParentChildTestCase { articleToControl.get(articleId).commentIds.add(idValue); - IndexRequestBuilder indexRequest = createIndexRequest("test", "comment", idValue, - articleId, "commenter", commenter, "randomized", true); + IndexRequestBuilder indexRequest = createIndexRequest( + "test", + "comment", + idValue, + articleId, + "commenter", + commenter, + "randomized", + true + ); requests.add(indexRequest); } } } - requests.add(createIndexRequest("test", "article", "a", null, "category", new String[]{"a"}, "randomized", false)); - requests.add(createIndexRequest("test", "article", "b", null, "category", new String[]{"a", "b"}, "randomized", false)); - requests.add(createIndexRequest("test", "article", "c", null, "category", new String[]{"a", "b", "c"}, "randomized", false)); - requests.add(createIndexRequest("test", "article", "d", null, "category", new String[]{"c"}, "randomized", false)); + requests.add(createIndexRequest("test", "article", "a", null, "category", new String[] { "a" }, "randomized", false)); + requests.add(createIndexRequest("test", "article", "b", null, "category", new String[] { "a", "b" }, "randomized", false)); + requests.add(createIndexRequest("test", "article", "c", null, "category", new String[] { "a", "b", "c" }, "randomized", false)); + requests.add(createIndexRequest("test", "article", "d", null, "category", new String[] { "c" }, "randomized", false)); requests.add(createIndexRequest("test", "comment", "e", "a")); requests.add(createIndexRequest("test", "comment", "f", "c")); @@ -121,7 +135,6 @@ public abstract class AbstractParentChildTestCase extends ParentChildTestCase { ensureSearchable("test"); } - protected static final class Control { final String category; diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index 2a6ca68f524..74c884c9d0e 100644 --- 
a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -71,15 +71,17 @@ public class ChildrenIT extends AbstractParentChildTestCase { public void testChildrenAggs() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("randomized", true)) - .addAggregation( - terms("category").field("category").size(10000).subAggregation(children("to_comment", "comment") - .subAggregation( - terms("commenters").field("commenter").size(10000).subAggregation( - topHits("top_comments") - )) + .setQuery(matchQuery("randomized", true)) + .addAggregation( + terms("category").field("category") + .size(10000) + .subAggregation( + children("to_comment", "comment").subAggregation( + terms("commenters").field("commenter").size(10000).subAggregation(topHits("top_comments")) ) - ).get(); + ) + ) + .get(); assertSearchResponse(searchResponse); Terms categoryTerms = searchResponse.getAggregations().get("category"); @@ -92,11 +94,10 @@ public class ChildrenIT extends AbstractParentChildTestCase { Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size())); - assertThat(((InternalAggregation)childrenBucket).getProperty("_count"), - equalTo((long) entry1.getValue().commentIds.size())); + assertThat(((InternalAggregation) childrenBucket).getProperty("_count"), equalTo((long) entry1.getValue().commentIds.size())); Terms commentersTerms = childrenBucket.getAggregations().get("commenters"); - assertThat(((InternalAggregation)childrenBucket).getProperty("commenters"), sameInstance(commentersTerms)); + assertThat(((InternalAggregation) childrenBucket).getProperty("commenters"), sameInstance(commentersTerms)); assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size())); for (Map.Entry> entry2 : entry1.getValue().commenterToCommentId.entrySet()) { Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey()); @@ -113,12 +114,13 @@ public class ChildrenIT extends AbstractParentChildTestCase { public void testParentWithMultipleBuckets() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("randomized", false)) - .addAggregation( - terms("category").field("category").size(10000).subAggregation( - children("to_comment", "comment").subAggregation(topHits("top_comments").sort("id", SortOrder.ASC)) - ) - ).get(); + .setQuery(matchQuery("randomized", false)) + .addAggregation( + terms("category").field("category") + .size(10000) + .subAggregation(children("to_comment", "comment").subAggregation(topHits("top_comments").sort("id", SortOrder.ASC))) + ) + .get(); assertSearchResponse(searchResponse); Terms categoryTerms = searchResponse.getAggregations().get("category"); @@ -172,10 +174,10 @@ public class ChildrenIT extends AbstractParentChildTestCase { public void testWithDeletes() throws Exception { String indexName = "xyz"; assertAcked( - prepareCreate(indexName) - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), - "name", "keyword")) + prepareCreate(indexName).addMapping( + "doc", + addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), 
"name", "keyword") + ) ); List requests = new ArrayList<>(); @@ -188,8 +190,8 @@ public class ChildrenIT extends AbstractParentChildTestCase { for (int i = 0; i < 10; i++) { SearchResponse searchResponse = client().prepareSearch(indexName) - .addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))) - .get(); + .addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))) + .get(); assertNoFailures(searchResponse); Children children = searchResponse.getAggregations().get("children"); @@ -206,20 +208,17 @@ public class ChildrenIT extends AbstractParentChildTestCase { */ UpdateResponse updateResponse; updateResponse = client().prepareUpdate(indexName, "doc", idToUpdate) - .setRouting("1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1) - .setDetectNoop(false) - .get(); + .setRouting("1") + .setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1) + .setDetectNoop(false) + .get(); assertThat(updateResponse.getVersion(), greaterThan(1L)); refresh(); } } public void testNonExistingChildType() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test") - .addAggregation( - children("non-existing", "xyz") - ).get(); + SearchResponse searchResponse = client().prepareSearch("test").addAggregation(children("non-existing", "xyz")).get(); assertSearchResponse(searchResponse); Children children = searchResponse.getAggregations().get("non-existing"); @@ -232,18 +231,29 @@ public class ChildrenIT extends AbstractParentChildTestCase { String masterType = "masterprod"; String childType = "variantsku"; assertAcked( - prepareCreate(indexName) - .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - masterType, childType), - "brand", "text", "name", "keyword", "material", "text", "color", "keyword", "size", "keyword")) + prepareCreate(indexName).setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + .addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, masterType, childType), + "brand", + "text", + "name", + "keyword", + "material", + "text", + "color", + "keyword", + "size", + "keyword" + ) + ) ); List requests = new ArrayList<>(); - requests.add(createIndexRequest(indexName, masterType, "1", null, "brand", "Levis", "name", - "Style 501", "material", "Denim")); + requests.add(createIndexRequest(indexName, masterType, "1", null, "brand", "Levis", "name", "Style 501", "material", "Denim")); requests.add(createIndexRequest(indexName, childType, "3", "1", "color", "blue", "size", "32")); requests.add(createIndexRequest(indexName, childType, "4", "1", "color", "blue", "size", "34")); requests.add(createIndexRequest(indexName, childType, "5", "1", "color", "blue", "size", "36")); @@ -251,8 +261,9 @@ public class ChildrenIT extends AbstractParentChildTestCase { requests.add(createIndexRequest(indexName, childType, "7", "1", "color", "black", "size", "40")); requests.add(createIndexRequest(indexName, childType, "8", "1", "color", "gray", "size", "36")); - requests.add(createIndexRequest(indexName, masterType, "2", null, "brand", "Wrangler", "name", - "Regular Cut", "material", "Leather")); + requests.add( + createIndexRequest(indexName, masterType, "2", null, "brand", "Wrangler", "name", "Regular Cut", "material", 
"Leather") + ); requests.add(createIndexRequest(indexName, childType, "9", "2", "color", "blue", "size", "32")); requests.add(createIndexRequest(indexName, childType, "10", "2", "color", "blue", "size", "34")); requests.add(createIndexRequest(indexName, childType, "12", "2", "color", "black", "size", "36")); @@ -263,11 +274,12 @@ public class ChildrenIT extends AbstractParentChildTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch(indexName) - .setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None)) - .addAggregation(children("my-refinements", childType) - .subAggregation(terms("my-colors").field("color")) - .subAggregation(terms("my-sizes").field("size")) - ).get(); + .setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None)) + .addAggregation( + children("my-refinements", childType).subAggregation(terms("my-colors").field("color")) + .subAggregation(terms("my-sizes").field("size")) + ) + .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -297,11 +309,14 @@ public class ChildrenIT extends AbstractParentChildTestCase { String parentType = "country"; String childType = "city"; assertAcked( - prepareCreate(indexName) - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - grandParentType, parentType, parentType, childType), - "name", "keyword")) + prepareCreate(indexName).addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, grandParentType, parentType, parentType, childType), + "name", + "keyword" + ) + ) ); createIndexRequest(indexName, grandParentType, "1", null, "name", "europe").get(); @@ -310,14 +325,11 @@ public class ChildrenIT extends AbstractParentChildTestCase { refresh(); SearchResponse response = client().prepareSearch(indexName) - .setQuery(matchQuery("name", "europe")) - .addAggregation( - children(parentType, parentType).subAggregation(children(childType, childType).subAggregation( - terms("name").field("name") - ) - ) - ) - .get(); + .setQuery(matchQuery("name", "europe")) + .addAggregation( + children(parentType, parentType).subAggregation(children(childType, childType).subAggregation(terms("name").field("name"))) + ) + .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -340,11 +352,18 @@ public class ChildrenIT extends AbstractParentChildTestCase { // Before we only evaluated segments that yielded matches in 'towns' and 'parent_names' aggs, which caused // us to miss to evaluate child docs in segments we didn't have parent matches for. 
assertAcked( - prepareCreate("index") - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - "parentType", "childType"), - "name", "keyword", "town", "keyword", "age", "integer")) + prepareCreate("index").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parentType", "childType"), + "name", + "keyword", + "town", + "keyword", + "age", + "integer" + ) + ) ); List requests = new ArrayList<>(); requests.add(createIndexRequest("index", "parentType", "1", null, "name", "Bob", "town", "Memphis")); @@ -359,11 +378,14 @@ public class ChildrenIT extends AbstractParentChildTestCase { SearchResponse response = client().prepareSearch("index") .setSize(0) - .addAggregation(AggregationBuilders.terms("towns").field("town") - .subAggregation(AggregationBuilders.terms("parent_names").field("name") - .subAggregation(children("child_docs", "childType")) - ) - ).get(); + .addAggregation( + AggregationBuilders.terms("towns") + .field("town") + .subAggregation( + AggregationBuilders.terms("parent_names").field("name").subAggregation(children("child_docs", "childType")) + ) + ) + .get(); Terms towns = response.getAggregations().get("towns"); assertThat(towns.getBuckets().size(), equalTo(2)); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java index a730885ba2d..351b0beec48 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java @@ -60,49 +60,49 @@ public class ParentIT extends AbstractParentChildTestCase { final SearchRequestBuilder searchRequest = client().prepareSearch("test") .setSize(10000) .setQuery(matchQuery("randomized", true)) - .addAggregation( - parent("to_article", "comment") - .subAggregation( - terms("category").field("category").size(10000))); + .addAggregation(parent("to_article", "comment").subAggregation(terms("category").field("category").size(10000))); SearchResponse searchResponse = searchRequest.get(); assertSearchResponse(searchResponse); - long articlesWithComment = articleToControl.values().stream().filter( - parentControl -> !parentControl.commentIds.isEmpty() - ).count(); + long articlesWithComment = articleToControl.values().stream().filter(parentControl -> !parentControl.commentIds.isEmpty()).count(); Parent parentAgg = searchResponse.getAggregations().get("to_article"); - assertThat("Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - parentAgg.getDocCount(), equalTo(articlesWithComment)); + assertThat( + "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", + parentAgg.getDocCount(), + equalTo(articlesWithComment) + ); Terms categoryTerms = parentAgg.getAggregations().get("category"); - long categoriesWithComments = categoryToControl.values().stream().filter( - control -> !control.commentIds.isEmpty()).count(); - assertThat("Buckets: " + categoryTerms.getBuckets().stream().map( - (Function) MultiBucketsAggregation.Bucket::getKeyAsString).collect(Collectors.toList()) + - "\nCategories: " + categoryToControl.keySet(), - (long)categoryTerms.getBuckets().size(), equalTo(categoriesWithComments)); + long categoriesWithComments = categoryToControl.values().stream().filter(control -> !control.commentIds.isEmpty()).count(); + assertThat( + 
"Buckets: " + + categoryTerms.getBuckets() + .stream() + .map((Function) MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()) + + "\nCategories: " + + categoryToControl.keySet(), + (long) categoryTerms.getBuckets().size(), + equalTo(categoriesWithComments) + ); for (Map.Entry entry : categoryToControl.entrySet()) { // no children for this category -> no entry in the child to parent-aggregation - if(entry.getValue().commentIds.isEmpty()) { + if (entry.getValue().commentIds.isEmpty()) { assertNull(categoryTerms.getBucketByKey(entry.getKey())); continue; } final Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry.getKey()); - assertNotNull("Failed for category " + entry.getKey(), - categoryBucket); - assertThat("Failed for category " + entry.getKey(), - categoryBucket.getKeyAsString(), equalTo(entry.getKey())); + assertNotNull("Failed for category " + entry.getKey(), categoryBucket); + assertThat("Failed for category " + entry.getKey(), categoryBucket.getKeyAsString(), equalTo(entry.getKey())); // count all articles in this category which have at least one comment long articlesForCategory = articleToControl.values().stream(). - // only articles with this category + // only articles with this category filter(parentControl -> parentControl.category.equals(entry.getKey())). // only articles which have comments - filter(parentControl -> !parentControl.commentIds.isEmpty()). - count(); - assertThat("Failed for category " + entry.getKey(), - categoryBucket.getDocCount(), equalTo(articlesForCategory)); + filter(parentControl -> !parentControl.commentIds.isEmpty()).count(); + assertThat("Failed for category " + entry.getKey(), categoryBucket.getDocCount(), equalTo(articlesForCategory)); } } @@ -111,12 +111,13 @@ public class ParentIT extends AbstractParentChildTestCase { .setSize(10000) .setQuery(matchQuery("randomized", true)) .addAggregation( - terms("to_commenter").field("commenter").size(10000).subAggregation( - parent("to_article", "comment").subAggregation( - terms("to_category").field("category").size(10000).subAggregation( - topHits("top_category") - )) - ) + terms("to_commenter").field("commenter") + .size(10000) + .subAggregation( + parent("to_article", "comment").subAggregation( + terms("to_category").field("category").size(10000).subAggregation(topHits("top_category")) + ) + ) ); SearchResponse searchResponse = searchRequest.get(); assertSearchResponse(searchResponse); @@ -125,32 +126,40 @@ public class ParentIT extends AbstractParentChildTestCase { final Map> commenterToComments = getCommenterToComments(); Terms categoryTerms = searchResponse.getAggregations().get("to_commenter"); - assertThat("Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - categoryTerms.getBuckets().size(), equalTo(commenters.size())); + assertThat( + "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", + categoryTerms.getBuckets().size(), + equalTo(commenters.size()) + ); for (Terms.Bucket commenterBucket : categoryTerms.getBuckets()) { Set comments = commenterToComments.get(commenterBucket.getKeyAsString()); assertNotNull(comments); - assertThat("Failed for commenter " + commenterBucket.getKeyAsString(), - commenterBucket.getDocCount(), equalTo((long)comments.size())); + assertThat( + "Failed for commenter " + commenterBucket.getKeyAsString(), + commenterBucket.getDocCount(), + equalTo((long) comments.size()) + ); Parent articleAgg = commenterBucket.getAggregations().get("to_article"); assertThat(articleAgg.getName(), 
equalTo("to_article")); // find all articles for the comments for the current commenter - Set articles = articleToControl.values().stream().flatMap( - (Function>) parentControl -> parentControl.commentIds.stream(). - filter(comments::contains) - ).collect(Collectors.toSet()); + Set articles = articleToControl.values() + .stream() + .flatMap( + (Function>) parentControl -> parentControl.commentIds.stream().filter(comments::contains) + ) + .collect(Collectors.toSet()); - assertThat(articleAgg.getDocCount(), equalTo((long)articles.size())); + assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); Terms categoryAgg = articleAgg.getAggregations().get("to_category"); assertNotNull(categoryAgg); - List categories = categoryToControl.entrySet(). - stream(). - filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())). - map(Map.Entry::getKey). - collect(Collectors.toList()); + List categories = categoryToControl.entrySet() + .stream() + .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); for (String category : categories) { Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); @@ -172,9 +181,10 @@ public class ParentIT extends AbstractParentChildTestCase { } private Set getCommenters() { - return categoryToControl.values().stream().flatMap( - (Function>) control -> control.commenterToCommentId.keySet().stream()). - collect(Collectors.toSet()); + return categoryToControl.values() + .stream() + .flatMap((Function>) control -> control.commenterToCommentId.keySet().stream()) + .collect(Collectors.toSet()); } private Map> getCommenterToComments() { @@ -189,10 +199,7 @@ public class ParentIT extends AbstractParentChildTestCase { } public void testNonExistingParentType() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test") - .addAggregation( - parent("non-existing", "xyz") - ).get(); + SearchResponse searchResponse = client().prepareSearch("test").addAggregation(parent("non-existing", "xyz")).get(); assertSearchResponse(searchResponse); Parent parent = searchResponse.getAggregations().get("non-existing"); @@ -205,9 +212,10 @@ public class ParentIT extends AbstractParentChildTestCase { .setSize(10000) .setQuery(matchQuery("randomized", true)) .addAggregation( - terms("to_commenter").field("commenter").size(10000).subAggregation( - parent("to_article", "comment").subAggregation( - terms("to_category").field("category").size(10000)))); + terms("to_commenter").field("commenter") + .size(10000) + .subAggregation(parent("to_article", "comment").subAggregation(terms("to_category").field("category").size(10000))) + ); SearchResponse searchResponse = searchRequest.get(); assertSearchResponse(searchResponse); @@ -215,32 +223,40 @@ public class ParentIT extends AbstractParentChildTestCase { final Map> commenterToComments = getCommenterToComments(); Terms commentersAgg = searchResponse.getAggregations().get("to_commenter"); - assertThat("Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - commentersAgg.getBuckets().size(), equalTo(commenters.size())); + assertThat( + "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", + commentersAgg.getBuckets().size(), + equalTo(commenters.size()) + ); for (Terms.Bucket commenterBucket : commentersAgg.getBuckets()) { Set comments = commenterToComments.get(commenterBucket.getKeyAsString()); assertNotNull(comments); - assertThat("Failed 
for commenter " + commenterBucket.getKeyAsString(), - commenterBucket.getDocCount(), equalTo((long)comments.size())); + assertThat( + "Failed for commenter " + commenterBucket.getKeyAsString(), + commenterBucket.getDocCount(), + equalTo((long) comments.size()) + ); Parent articleAgg = commenterBucket.getAggregations().get("to_article"); assertThat(articleAgg.getName(), equalTo("to_article")); // find all articles for the comments for the current commenter - Set articles = articleToControl.values().stream().flatMap( - (Function>) parentControl -> parentControl.commentIds.stream(). - filter(comments::contains) - ).collect(Collectors.toSet()); + Set articles = articleToControl.values() + .stream() + .flatMap( + (Function>) parentControl -> parentControl.commentIds.stream().filter(comments::contains) + ) + .collect(Collectors.toSet()); - assertThat(articleAgg.getDocCount(), equalTo((long)articles.size())); + assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); Terms categoryAgg = articleAgg.getAggregations().get("to_category"); assertNotNull(categoryAgg); - List categories = categoryToControl.entrySet(). - stream(). - filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())). - map(Map.Entry::getKey). - collect(Collectors.toList()); + List categories = categoryToControl.entrySet() + .stream() + .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); for (String category : categories) { Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index d045e2d6c3e..2972b170e07 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -101,9 +101,12 @@ import static org.hamcrest.Matchers.is; public class ChildQuerySearchIT extends ParentChildTestCase { public void testMultiLevelChild() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - "parent", "child", "child", "grandchild"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child", "child", "grandchild") + ) + ); ensureGreen(); createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1").get(); @@ -111,46 +114,51 @@ public class ChildQuerySearchIT extends ParentChildTestCase { createIndexRequest("test", "grandchild", "gc1", "c1", "gc_field", "gc_value1").setRouting("p1").get(); refresh(); - SearchResponse searchResponse = client() - .prepareSearch("test") - .setQuery( - boolQuery() - .must(matchAllQuery()) - .filter(hasChildQuery( - "child", - boolQuery().must(termQuery("c_field", "c_value1")) - .filter(hasChildQuery("grandchild", termQuery("gc_field", "gc_value1"), ScoreMode.None)) - , ScoreMode.None))).get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery( + boolQuery().must(matchAllQuery()) + .filter( + hasChildQuery( + "child", + boolQuery().must(termQuery("c_field", "c_value1")) + .filter(hasChildQuery("grandchild", termQuery("gc_field", "gc_value1"), ScoreMode.None)), + 
ScoreMode.None + ) + ) + ) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()) - .filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))).execute() - .actionGet(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))) + .execute() + .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()) - .filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))).execute() - .actionGet(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))) + .execute() + .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1")); searchResponse = client().prepareSearch("test") - .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)).execute() - .actionGet(); + .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) + .execute() + .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); searchResponse = client().prepareSearch("test") - .setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)).execute() - .actionGet(); + .setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) + .execute() + .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1")); @@ -158,17 +166,18 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // see #2744 public void test2744() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "foo", "test"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "foo", "test")) + ); ensureGreen(); // index simple data createIndexRequest("test", "foo", "1", null, "foo", 1).get(); createIndexRequest("test", "test", "2", "1", "foo", 1).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test"). 
- setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)) - .get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); @@ -176,8 +185,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testSimpleChildQuery() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -191,8 +201,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // TEST FETCHING _parent from child SearchResponse searchResponse; - searchResponse = client().prepareSearch("test") - .setQuery(idsQuery("doc").addIds("c1")).get(); + searchResponse = client().prepareSearch("test").setQuery(idsQuery("doc").addIds("c1")).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); @@ -201,8 +210,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // TEST matching on parent searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))) - .get(); + .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -213,14 +222,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("p1")); // HAS CHILD - searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")) - .get(); + searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute() - .actionGet(); + searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); @@ -230,15 +237,13 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); // HAS PARENT - searchResponse = client().prepareSearch("test") - .setQuery(randomHasParent("parent", "p_field", "p_value2")).get(); + searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value2")).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3")); 
assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c4")); - searchResponse = client().prepareSearch("test") - .setQuery(randomHasParent("parent", "p_field", "p_value1")).get(); + searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value1")).get(); assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); @@ -246,8 +251,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Issue #3290 public void testCachingBugWithFqueryFilter() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); List builders = new ArrayList<>(); // index simple data @@ -273,19 +279,20 @@ public class ChildQuerySearchIT extends ParentChildTestCase { for (int i = 1; i <= 10; i++) { logger.info("Round {}", i); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max))) + .get(); assertNoFailures(searchResponse); searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true))) - .get(); + .setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true))) + .get(); assertNoFailures(searchResponse); } } public void testHasParentFilter() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); Map> parentToChildren = new HashMap<>(); // Childless parent @@ -317,8 +324,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(parentToChildren.isEmpty(), equalTo(false)); for (Map.Entry> parentToChildrenEntry : parentToChildren.entrySet()) { SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false))) - .setSize(numChildDocsPerParent).get(); + .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false))) + .setSize(numChildDocsPerParent) + .get(); assertNoFailures(searchResponse); Set childIds = parentToChildrenEntry.getValue(); @@ -332,8 +340,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testSimpleChildQueryWithFlush() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data with flushes, so we have many segments @@ -354,22 +363,20 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // HAS CHILD QUERY SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), 
ScoreMode.None)) - .get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)) - .get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); - searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)) - .get(); + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -377,22 +384,22 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // HAS CHILD FILTER searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -400,10 +407,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testScopedFacet() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), - "c_field", "keyword"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "c_field", "keyword") + ) + ); ensureGreen(); // index simple data @@ -416,14 +425,24 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); - SearchResponse searchResponse = client() - .prepareSearch("test") - .setQuery(hasChildQuery("child", - boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None)) - 
.addAggregation(AggregationBuilders.global("global").subAggregation( - AggregationBuilders.filter("filter", - boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).subAggregation( - AggregationBuilders.terms("facet1").field("c_field")))).get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery( + hasChildQuery( + "child", + boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), + ScoreMode.None + ) + ) + .addAggregation( + AggregationBuilders.global("global") + .subAggregation( + AggregationBuilders.filter( + "filter", + boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")) + ).subAggregation(AggregationBuilders.terms("facet1").field("c_field")) + ) + ) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -440,8 +459,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testDeletedParent() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1").get(); @@ -454,7 +474,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))).get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); @@ -466,7 +487,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { client().admin().indices().prepareRefresh().get(); searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))).get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); @@ -474,8 +496,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testDfsSearchType() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -488,21 +511,24 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*")), ScoreMode.None))) - .get(); + SearchResponse searchResponse = client().prepareSearch("test") + 
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*")), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); - searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(boolQuery().mustNot(hasParentQuery("parent", - boolQuery().should(queryStringQuery("p_field:*")), false))).execute() - .actionGet(); + searchResponse = client().prepareSearch("test") + .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*")), false))) + .execute() + .actionGet(); assertNoFailures(searchResponse); } public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrChildDocs() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); @@ -512,19 +538,22 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None))).get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false))).get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } public void testCountApiUsage() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); String parentId = "p1"; @@ -532,30 +561,32 @@ public class ChildQuerySearchIT extends ParentChildTestCase { createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get(); refresh(); - SearchResponse countResponse = client().prepareSearch("test").setSize(0) - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) - .get(); + SearchResponse countResponse = client().prepareSearch("test") + .setSize(0) + .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) + .get(); assertHitCount(countResponse, 1L); - countResponse = client().prepareSearch("test").setSize(0) - .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)) - .get(); + countResponse = client().prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get(); assertHitCount(countResponse, 1L); - countResponse = client().prepareSearch("test").setSize(0) - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None))) - .get(); + countResponse = client().prepareSearch("test") + 
.setSize(0) + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None))) + .get(); assertHitCount(countResponse, 1L); - countResponse = client().prepareSearch("test").setSize(0) + countResponse = client().prepareSearch("test") + .setSize(0) .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false))) - .get(); + .get(); assertHitCount(countResponse, 1L); } public void testExplainUsage() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); String parentId = "p1"; @@ -564,22 +595,22 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setExplain(true) - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) - .get(); + .setExplain(true) + .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) + .get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); searchResponse = client().prepareSearch("test") - .setExplain(true) - .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)) - .get(); + .setExplain(true) + .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)) + .get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId) - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) - .get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) + .get(); assertThat(explainResponse.isExists(), equalTo(true)); assertThat(explainResponse.getExplanation().toString(), containsString("join value p1")); } @@ -618,27 +649,36 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties") - .startObject("join_field") + assertAcked( + prepareCreate("test").addMapping( + "doc", + jsonBuilder().startObject() + .startObject("doc") + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", new String[] {"child", "child1"}) + .field("parent", new String[] { "child", "child1" }) .endObject() - .endObject() - .endObject().endObject().endObject() - )); + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); ensureGreen(); indexRandom(true, createDocBuilders().toArray(new IndexRequestBuilder[0])); - SearchResponse response = client() - .prepareSearch("test") - .setQuery( - hasChildQuery( - "child", - QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), - fieldValueFactorFunction("c_field1")) - .boostMode(CombineFunction.REPLACE), ScoreMode.Total)).get(); + SearchResponse response = client().prepareSearch("test") + .setQuery( + hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) + .boostMode(CombineFunction.REPLACE), + ScoreMode.Total + ) + 
) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); @@ -648,14 +688,16 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); assertThat(response.getHits().getHits()[2].getScore(), equalTo(3f)); - response = client() - .prepareSearch("test") - .setQuery( - hasChildQuery( - "child", - QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), - fieldValueFactorFunction("c_field1")) - .boostMode(CombineFunction.REPLACE), ScoreMode.Max)).get(); + response = client().prepareSearch("test") + .setQuery( + hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) + .boostMode(CombineFunction.REPLACE), + ScoreMode.Max + ) + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); @@ -665,14 +707,16 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); assertThat(response.getHits().getHits()[2].getScore(), equalTo(2f)); - response = client() - .prepareSearch("test") - .setQuery( - hasChildQuery( - "child", - QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), - fieldValueFactorFunction("c_field1")) - .boostMode(CombineFunction.REPLACE), ScoreMode.Avg)).get(); + response = client().prepareSearch("test") + .setQuery( + hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) + .boostMode(CombineFunction.REPLACE), + ScoreMode.Avg + ) + ) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); @@ -682,15 +726,18 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); assertThat(response.getHits().getHits()[2].getScore(), equalTo(1.5f)); - response = client() - .prepareSearch("test") - .setQuery( - hasParentQuery( - "parent", - QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), - fieldValueFactorFunction("p_field2")) - .boostMode(CombineFunction.REPLACE), true)) - .addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get(); + response = client().prepareSearch("test") + .setQuery( + hasParentQuery( + "parent", + QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), fieldValueFactorFunction("p_field2")) + .boostMode(CombineFunction.REPLACE), + true + ) + ) + .addSort(SortBuilders.fieldSort("c_field3")) + .addSort(SortBuilders.scoreSort()) + .get(); assertThat(response.getHits().getTotalHits().value, equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); @@ -711,42 +758,43 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Issue #2536 public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); SearchResponse response = client().prepareSearch("test") - .setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get(); + 
.setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)) + .get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - client().prepareIndex("test", "doc").setSource(jsonBuilder().startObject().field("text", "value").endObject()) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test", "doc") + .setSource(jsonBuilder().startObject().field("text", "value").endObject()) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .get(); - response = client().prepareSearch("test") - .setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get(); + response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)) - .get(); + response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)).get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - response = client().prepareSearch("test") - .setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get(); + response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), true)) - .get(); + response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), true)).get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); } public void testHasChildAndHasParentFilter_withFilter() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); @@ -757,35 +805,39 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None))) - .get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()) - .filter(hasParentQuery("parent", termQuery("p_field", 1), false))).get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); } public void testHasChildInnerHitsHighlighting() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", 
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); createIndexRequest("test", "child", "2", "1", "c_field", "foo bar").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery( - hasChildQuery("child", matchQuery("c_field", "foo"), ScoreMode.None) - .innerHit(new InnerHitBuilder().setHighlightBuilder( - new HighlightBuilder().field(new Field("c_field") - .highlightQuery(QueryBuilders.matchQuery("c_field", "bar")))))) - .get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery( + hasChildQuery("child", matchQuery("c_field", "foo"), ScoreMode.None).innerHit( + new InnerHitBuilder().setHighlightBuilder( + new HighlightBuilder().field(new Field("c_field").highlightQuery(QueryBuilders.matchQuery("c_field", "bar"))) + ) + ) + ) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); @@ -796,8 +848,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // query filter in case for p/c shouldn't execute per segment, but rather @@ -807,30 +860,41 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))) - .get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))) + .get(); assertSearchHit(searchResponse, 1, hasId("1")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false))).get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false))) + .get(); assertSearchHit(searchResponse, 1, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()) - .filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))) - .get(); + .setQuery( + boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))) + ) + .get(); assertSearchHit(searchResponse, 1, hasId("1")); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()) - .filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))).get(); + .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))) + .get(); assertSearchHit(searchResponse, 1, hasId("2")); } public void testSimpleQueryRewrite() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", 
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), - "c_field", "keyword", "p_field", "keyword"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), + "c_field", + "keyword", + "p_field", + "keyword" + ) + ) + ); ensureGreen(); // index simple data @@ -847,12 +911,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } refresh(); - SearchType[] searchTypes = new SearchType[]{SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH}; + SearchType[] searchTypes = new SearchType[] { SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH }; for (SearchType searchType : searchTypes) { - SearchResponse searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(hasChildQuery("child", prefixQuery("c_field", "c"), ScoreMode.Max)) - .addSort("p_field", SortOrder.ASC) - .setSize(5).get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setSearchType(searchType) + .setQuery(hasChildQuery("child", prefixQuery("c_field", "c"), ScoreMode.Max)) + .addSort("p_field", SortOrder.ASC) + .setSize(5) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(10L)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("p000")); @@ -861,9 +927,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertThat(searchResponse.getHits().getHits()[3].getId(), equalTo("p003")); assertThat(searchResponse.getHits().getHits()[4].getId(), equalTo("p004")); - searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p"), true)).addSort("c_field", SortOrder.ASC) - .setSize(5).get(); + searchResponse = client().prepareSearch("test") + .setSearchType(searchType) + .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p"), true)) + .addSort("c_field", SortOrder.ASC) + .setSize(5) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(500L)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("c000")); @@ -876,8 +945,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Issue #3144 public void testReIndexingParentAndChildDocuments() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -891,17 +961,16 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)).get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); - searchResponse = client() - .prepareSearch("test") - .setQuery( - boolQuery().must(matchQuery("c_field", "x")).must( - hasParentQuery("parent", termQuery("p_field", 
"p_value2"), true))).get(); + searchResponse = client().prepareSearch("test") + .setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3")); @@ -918,17 +987,15 @@ public class ChildQuerySearchIT extends ParentChildTestCase { searchResponse = client().prepareSearch("test") .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)) - .get(); + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); - searchResponse = client() - .prepareSearch("test") - .setQuery( - boolQuery().must(matchQuery("c_field", "x")).must( - hasParentQuery("parent", termQuery("p_field", "p_value2"), true))).get(); + searchResponse = client().prepareSearch("test") + .setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); @@ -937,8 +1004,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Issue #3203 public void testHasChildQueryWithMinimumScore() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -950,10 +1018,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase { createIndexRequest("test", "child", "c5", "p2", "c_field", "x").get(); refresh(); - SearchResponse searchResponse = client() - .prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)) - .setMinScore(3) // Score needs to be 3 or above! - .get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)) + .setMinScore(3) // Score needs to be 3 or above! 
+ .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); @@ -961,9 +1029,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testParentFieldQuery() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").setSettings(Settings.builder().put("index.refresh_interval", -1)) + .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); SearchResponse response = client().prepareSearch("test") @@ -983,20 +1052,19 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); response = client().prepareSearch("test") - .setQuery(boolQuery() - .should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))) - .should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child"))) - ).get(); + .setQuery( + boolQuery().should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))) + .should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child"))) + ) + .get(); assertHitCount(response, 2L); } public void testParentIdQuery() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder() - .put(indexSettings()) - .put("index.refresh_interval", -1) - ) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1)) + .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); createIndexRequest("test", "child", "c1", "p1").get(); @@ -1009,16 +1077,15 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); response = client().prepareSearch("test") - .setQuery(boolQuery() - .should(parentId("child", "p1")) - .should(parentId("child", "p2")) - ).get(); + .setQuery(boolQuery().should(parentId("child", "p1")).should(parentId("child", "p2"))) + .get(); assertHitCount(response, 2L); } public void testHasChildNotBeingCached() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -1037,8 +1104,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -1046,8 +1113,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { client().admin().indices().prepareRefresh("test").get(); 
searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) - .get(); + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); } @@ -1078,17 +1145,24 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Issue #3818 public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception { - assertAcked(prepareCreate("grandissue") - .addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties") - .startObject("join_field") + assertAcked( + prepareCreate("grandissue").addMapping( + "doc", + jsonBuilder().startObject() + .startObject("doc") + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("grandparent", "parent") - .field("parent", new String[] {"child_type_one", "child_type_two"}) + .field("grandparent", "parent") + .field("parent", new String[] { "child_type_one", "child_type_two" }) .endObject() - .endObject() - .endObject().endObject().endObject() - )); + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); createIndexRequest("grandissue", "grandparent", "1", null, "name", "Grandpa").get(); createIndexRequest("grandissue", "parent", "2", "1", "name", "Dana").get(); @@ -1096,62 +1170,92 @@ public class ChildQuerySearchIT extends ParentChildTestCase { createIndexRequest("grandissue", "child_type_two", "4", "2", "name", "Kate").setRouting("1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("grandissue").setQuery( + SearchResponse searchResponse = client().prepareSearch("grandissue") + .setQuery( boolQuery().must( - hasChildQuery( - "parent", - boolQuery().must( - hasChildQuery( - "child_type_one", - boolQuery().must( - queryStringQuery("name:William*") - ), - ScoreMode.None) - ), - ScoreMode.None) + hasChildQuery( + "parent", + boolQuery().must( + hasChildQuery("child_type_one", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None) + ), + ScoreMode.None + ) ) - ).get(); + ) + .get(); assertHitCount(searchResponse, 1L); - searchResponse = client().prepareSearch("grandissue").setQuery( + searchResponse = client().prepareSearch("grandissue") + .setQuery( boolQuery().must( - hasChildQuery( - "parent", - boolQuery().must( - hasChildQuery( - "child_type_two", - boolQuery().must( - queryStringQuery("name:William*") - ), - ScoreMode.None) - ), - ScoreMode.None) + hasChildQuery( + "parent", + boolQuery().must( + hasChildQuery("child_type_two", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None) + ), + ScoreMode.None + ) ) - ).get(); + ) + .get(); assertHitCount(searchResponse, 0L); } public void testHasChildQueryWithNestedInnerObjects() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), - "objects", "nested"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "objects", "nested") + ) + ); ensureGreen(); - createIndexRequest("test", "parent", "p1", null, jsonBuilder().startObject().field("p_field", "1").startArray("objects") - .startObject().field("i_field", "1").endObject() - .startObject().field("i_field", "2").endObject() - 
.startObject().field("i_field", "3").endObject() - .startObject().field("i_field", "4").endObject() - .startObject().field("i_field", "5").endObject() - .startObject().field("i_field", "6").endObject() - .endArray().endObject()) - .get(); - createIndexRequest("test", "parent", "p2", null, jsonBuilder().startObject().field("p_field", "2").startArray("objects") - .startObject().field("i_field", "1").endObject() - .startObject().field("i_field", "2").endObject() - .endArray().endObject()) - .get(); + createIndexRequest( + "test", + "parent", + "p1", + null, + jsonBuilder().startObject() + .field("p_field", "1") + .startArray("objects") + .startObject() + .field("i_field", "1") + .endObject() + .startObject() + .field("i_field", "2") + .endObject() + .startObject() + .field("i_field", "3") + .endObject() + .startObject() + .field("i_field", "4") + .endObject() + .startObject() + .field("i_field", "5") + .endObject() + .startObject() + .field("i_field", "6") + .endObject() + .endArray() + .endObject() + ).get(); + createIndexRequest( + "test", + "parent", + "p2", + null, + jsonBuilder().startObject() + .field("p_field", "2") + .startArray("objects") + .startObject() + .field("i_field", "1") + .endObject() + .startObject() + .field("i_field", "2") + .endObject() + .endArray() + .endObject() + ).get(); createIndexRequest("test", "child", "c1", "p1", "c_field", "blue").get(); createIndexRequest("test", "child", "c2", "p1", "c_field", "red").get(); createIndexRequest("test", "child", "c3", "p2", "c_field", "red").get(); @@ -1159,23 +1263,28 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ScoreMode scoreMode = randomFrom(ScoreMode.values()); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode)) - .filter(boolQuery().mustNot(termQuery("p_field", "3")))) - .get(); + .setQuery( + boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode)) + .filter(boolQuery().mustNot(termQuery("p_field", "3"))) + ) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode)) - .filter(boolQuery().mustNot(termQuery("p_field", "3")))) - .get(); + .setQuery( + boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode)) + .filter(boolQuery().mustNot(termQuery("p_field", "3"))) + ) + .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); } public void testNamedFilters() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); String parentId = "p1"; @@ -1183,40 +1292,37 @@ public class ChildQuerySearchIT extends ParentChildTestCase { createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", - termQuery("c_field", "1"), ScoreMode.Max).queryName("test")) - .get(); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test")) + 
.get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); - searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", - termQuery("p_field", "1"), true).queryName("test")) - .get(); + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test")) + .get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", - termQuery("c_field", "1"), ScoreMode.None).queryName("test"))) - .get(); + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test"))) + .get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", - termQuery("p_field", "1"), false).queryName("test"))) - .get(); + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test"))) + .get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); } public void testParentChildQueriesNoParentType() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder() - .put(indexSettings()) - .put("index.refresh_interval", -1))); + assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1))); ensureGreen(); String parentId = "p1"; @@ -1224,45 +1330,35 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); try { - client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)) - .get(); + client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); } try { - client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) - .get(); + client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); } try { - client().prepareSearch("test") - .setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)) - .get(); + client().prepareSearch("test").setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); } try { - client().prepareSearch("test") - .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)) - .get(); + 
client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); } try { - client().prepareSearch("test") - .setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)) - .get(); + client().prepareSearch("test").setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); @@ -1270,9 +1366,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testParentChildCaching() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").setSettings(Settings.builder().put("index.refresh_interval", -1)) + .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); // index simple data @@ -1292,29 +1389,33 @@ public class ChildQuerySearchIT extends ParentChildTestCase { for (int i = 0; i < 2; i++) { SearchResponse searchResponse = client().prepareSearch() - .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery() - .must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)) - .must(matchAllQuery()))) - .get(); + .setQuery( + boolQuery().must(matchAllQuery()) + .filter( + boolQuery().must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)).must(matchAllQuery()) + ) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); } - - createIndexRequest("test", "child", "c3", "p2", "c_field", "blue").get(); + createIndexRequest("test", "child", "c3", "p2", "c_field", "blue").get(); client().admin().indices().prepareRefresh("test").get(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery() - .must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)) - .must(matchAllQuery()))) - .get(); + .setQuery( + boolQuery().must(matchAllQuery()) + .filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)).must(matchAllQuery())) + ) + .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } public void testParentChildQueriesViaScrollApi() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); for (int i = 0; i < 10; i++) { createIndexRequest("test", "parent", "p" + i, null).get(); @@ -1323,21 +1424,20 @@ public class ChildQuerySearchIT extends ParentChildTestCase { refresh(); - QueryBuilder[] queries = new QueryBuilder[]{ - hasChildQuery("child", matchAllQuery(), ScoreMode.None), - boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None)), - hasParentQuery("parent", matchAllQuery(), false), - boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false)) - }; + QueryBuilder[] queries = new QueryBuilder[] { + hasChildQuery("child", matchAllQuery(), ScoreMode.None), + 
boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None)), + hasParentQuery("parent", matchAllQuery(), false), + boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false)) }; for (QueryBuilder query : queries) { SearchResponse scrollResponse = client().prepareSearch("test") - .setScroll(TimeValue.timeValueSeconds(30)) - .setSize(1) - .addStoredField("_id") - .setQuery(query) - .execute() - .actionGet(); + .setScroll(TimeValue.timeValueSeconds(30)) + .setSize(1) + .addStoredField("_id") + .setQuery(query) + .execute() + .actionGet(); assertNoFailures(scrollResponse); assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(10L)); @@ -1345,9 +1445,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { do { assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(10L)); scannedDocs += scrollResponse.getHits().getHits().length; - scrollResponse = client() - .prepareSearchScroll(scrollResponse.getScrollId()) - .setScroll(TimeValue.timeValueSeconds(30)).get(); + scrollResponse = client().prepareSearchScroll(scrollResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } while (scrollResponse.getHits().getHits().length > 0); clearScroll(scrollResponse.getScrollId()); assertThat(scannedDocs, equalTo(10)); @@ -1357,16 +1455,16 @@ public class ChildQuerySearchIT extends ParentChildTestCase { private List createMinMaxDocBuilders() { List indexBuilders = new ArrayList<>(); // Parent 1 and its children - indexBuilders.add(createIndexRequest("test", "parent", "1", null, "id",1)); + indexBuilders.add(createIndexRequest("test", "parent", "1", null, "id", 1)); indexBuilders.add(createIndexRequest("test", "child", "10", "1", "foo", "one")); // Parent 2 and its children - indexBuilders.add(createIndexRequest("test", "parent", "2", null, "id",2)); + indexBuilders.add(createIndexRequest("test", "parent", "2", null, "id", 2)); indexBuilders.add(createIndexRequest("test", "child", "11", "2", "foo", "one")); indexBuilders.add(createIndexRequest("test", "child", "12", "2", "foo", "one two")); // Parent 3 and its children - indexBuilders.add(createIndexRequest("test", "parent", "3", null, "id",3)); + indexBuilders.add(createIndexRequest("test", "parent", "3", null, "id", 3)); indexBuilders.add(createIndexRequest("test", "child", "13", "3", "foo", "one")); indexBuilders.add(createIndexRequest("test", "child", "14", "3", "foo", "one two")); indexBuilders.add(createIndexRequest("test", "child", "15", "3", "foo", "one two three")); @@ -1383,26 +1481,24 @@ public class ChildQuerySearchIT extends ParentChildTestCase { private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException { HasChildQueryBuilder hasChildQuery = hasChildQuery( - "child", - QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two")), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ - new FunctionScoreQueryBuilder.FilterFunctionBuilder(weightFactorFunction(1)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"), - weightFactorFunction(1)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"), - weightFactorFunction(1)) - }).boostMode(CombineFunction.REPLACE).scoreMode(FunctionScoreQuery.ScoreMode.SUM), scoreMode) - .minMaxChildren(minChildren, maxChildren != null ? 
maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN); + "child", + QueryBuilders.functionScoreQuery( + constantScoreQuery(QueryBuilders.termQuery("foo", "two")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(weightFactorFunction(1)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1)) } + ).boostMode(CombineFunction.REPLACE).scoreMode(FunctionScoreQuery.ScoreMode.SUM), + scoreMode + ).minMaxChildren(minChildren, maxChildren != null ? maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN); - return client() - .prepareSearch("test") - .setQuery(hasChildQuery) - .addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get(); + return client().prepareSearch("test").setQuery(hasChildQuery).addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get(); } public void testMinMaxChildren() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); ensureGreen(); indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0])); @@ -1714,64 +1810,75 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testHasParentInnerQueryType() { - assertAcked(prepareCreate("test") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent-type", "child-type"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent-type", "child-type") + ) + ); createIndexRequest("test", "child-type", "child-id", "parent-id").get(); createIndexRequest("test", "parent-type", "parent-id", null).get(); refresh(); - //make sure that when we explicitly set a type, the inner query is executed in the context of the child type instead - SearchResponse searchResponse = client().prepareSearch("test").setQuery( - hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)).get(); + // make sure that when we explicitly set a type, the inner query is executed in the context of the child type instead + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)) + .get(); assertSearchHits(searchResponse, "parent-id"); - //make sure that when we explicitly set a type, the inner query is executed in the context of the parent type instead - searchResponse = client().prepareSearch("test").setQuery( - hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)).get(); + // make sure that when we explicitly set a type, the inner query is executed in the context of the parent type instead + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)) + .get(); assertSearchHits(searchResponse, "child-id"); } public void testHighlightersIgnoreParentChild() throws IOException { - assertAcked(prepareCreate("test") - .addMapping("doc", jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + assertAcked( + 
prepareCreate("test").addMapping( + "doc", + jsonBuilder().startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent-type", "child-type") + .field("parent-type", "child-type") .endObject() - .endObject() - .startObject("searchText") + .endObject() + .startObject("searchText") .field("type", "text") .field("term_vector", "with_positions_offsets") .field("index_options", "offsets") - .endObject() - .endObject().endObject() - )); + .endObject() + .endObject() + .endObject() + ) + ); createIndexRequest("test", "parent-type", "parent-id", null, "searchText", "quick brown fox").get(); createIndexRequest("test", "child-type", "child-id", "parent-id", "searchText", "quick brown fox").get(); refresh(); - String[] highlightTypes = new String[] {"plain", "fvh", "unified"}; + String[] highlightTypes = new String[] { "plain", "fvh", "unified" }; for (String highlightType : highlightTypes) { logger.info("Testing with highlight type [{}]", highlightType); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(new BoolQueryBuilder() - .must(new MatchQueryBuilder("searchText", "fox")) - .must(new HasChildQueryBuilder("child-type", new MatchAllQueryBuilder(), ScoreMode.None)) - ) - .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))) - .get(); + .setQuery( + new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox")) + .must(new HasChildQueryBuilder("child-type", new MatchAllQueryBuilder(), ScoreMode.None)) + ) + .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))) + .get(); assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("parent-id")); HighlightField highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText"); assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown fox")); searchResponse = client().prepareSearch("test") - .setQuery(new BoolQueryBuilder() - .must(new MatchQueryBuilder("searchText", "fox")) - .must(new HasParentQueryBuilder("parent-type", new MatchAllQueryBuilder(), false)) - ) - .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))) - .get(); + .setQuery( + new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox")) + .must(new HasParentQueryBuilder("parent-type", new MatchAllQueryBuilder(), false)) + ) + .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))) + .get(); assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("child-id")); highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText"); @@ -1780,16 +1887,17 @@ public class ChildQuerySearchIT extends ParentChildTestCase { } public void testAliasesFilterWithHasChildQuery() throws Exception { - assertAcked(prepareCreate("my-index") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("my-index").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); createIndexRequest("my-index", "parent", "1", null).get(); createIndexRequest("my-index", "child", "2", "1").get(); refresh(); - assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", - 
hasChildQuery("child", matchAllQuery(), ScoreMode.None))); - assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", - hasParentQuery("parent", matchAllQuery(), false))); + assertAcked( + admin().indices().prepareAliases().addAlias("my-index", "filter1", hasChildQuery("child", matchAllQuery(), ScoreMode.None)) + ); + assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery(), false))); SearchResponse response = client().prepareSearch("filter1").get(); assertHitCount(response, 1); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index 19d385db1e9..913cba69502 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -102,23 +102,30 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testSimpleParentChild() throws Exception { - assertAcked(prepareCreate("articles") - .addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties") - .startObject("join_field") + assertAcked( + prepareCreate("articles").addMapping( + "doc", + jsonBuilder().startObject() + .startObject("doc") + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("article", "comment") + .field("article", "comment") .endObject() - .endObject() - .startObject("title") + .endObject() + .startObject("title") .field("type", "text") - .endObject() - .startObject("message") + .endObject() + .startObject("message") .field("type", "text") .field("fielddata", true) - .endObject() - .endObject().endObject().endObject() - )); + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); List requests = new ArrayList<>(); requests.add(createIndexRequest("articles", "article", "p1", null, "title", "quick brown fox")); @@ -132,8 +139,7 @@ public class InnerHitsIT extends ParentChildTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None) - .innerHit(new InnerHitBuilder())) + .setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -151,8 +157,11 @@ public class InnerHitsIT extends ParentChildTestCase { final boolean seqNoAndTerm = randomBoolean(); response = client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None) - .innerHit(new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm))) + .setQuery( + hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit( + new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm) + ) + ) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -188,13 +197,14 @@ public class InnerHitsIT extends ParentChildTestCase { response = client().prepareSearch("articles") .setQuery( hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit( - new InnerHitBuilder() - .addFetchField("message") + new InnerHitBuilder().addFetchField("message") .setHighlightBuilder(new HighlightBuilder().field("message")) - .setExplain(true).setSize(1) - 
.addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", - Collections.emptyMap()))) - ).get(); + .setExplain(true) + .setSize(1) + .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + ) + ) + .get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getHits().length, equalTo(1)); @@ -207,7 +217,9 @@ public class InnerHitsIT extends ParentChildTestCase { .setQuery( hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit( new InnerHitBuilder().addDocValueField("message").setSize(1) - )).get(); + ) + ) + .get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getHits().length, equalTo(1)); @@ -215,19 +227,26 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testRandomParentChild() throws Exception { - assertAcked(prepareCreate("idx") - .addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties") - .startObject("id") + assertAcked( + prepareCreate("idx").addMapping( + "doc", + jsonBuilder().startObject() + .startObject("doc") + .startObject("properties") + .startObject("id") .field("type", "keyword") - .endObject() - .startObject("join_field") + .endObject() + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", new String[] {"child1", "child2"}) + .field("parent", new String[] { "child1", "child2" }) .endObject() - .endObject() - .endObject().endObject().endObject() - )); + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); int numDocs = scaledRandomIntBetween(5, 50); List requestBuilders = new ArrayList<>(); @@ -254,12 +273,20 @@ public class InnerHitsIT extends ParentChildTestCase { int size = randomIntBetween(0, numDocs); BoolQueryBuilder boolQuery = new BoolQueryBuilder(); - boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setName("a") - .addSort(new FieldSortBuilder("id").order(SortOrder.ASC)).setSize(size)))); - boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setName("b") - .addSort(new FieldSortBuilder("id").order(SortOrder.ASC)).setSize(size)))); + boolQuery.should( + constantScoreQuery( + hasChildQuery("child1", matchAllQuery(), ScoreMode.None).innerHit( + new InnerHitBuilder().setName("a").addSort(new FieldSortBuilder("id").order(SortOrder.ASC)).setSize(size) + ) + ) + ); + boolQuery.should( + constantScoreQuery( + hasChildQuery("child2", matchAllQuery(), ScoreMode.None).innerHit( + new InnerHitBuilder().setName("b").addSort(new FieldSortBuilder("id").order(SortOrder.ASC)).setSize(size) + ) + ) + ); SearchResponse searchResponse = client().prepareSearch("idx") .setSize(numDocs) .addSort("id", SortOrder.ASC) @@ -281,7 +308,7 @@ public class InnerHitsIT extends ParentChildTestCase { SearchHits inner = searchHit.getInnerHits().get("a"); assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { - SearchHit innerHit = inner.getAt(child); + SearchHit innerHit = inner.getAt(child); assertThat(innerHit.getType(), equalTo("doc")); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); assertThat(innerHit.getId(), equalTo(childId)); @@ -303,26 +330,47 @@ 
public class InnerHitsIT extends ParentChildTestCase { } public void testInnerHitsOnHasParent() throws Exception { - assertAcked(prepareCreate("stack") - .addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "question", "answer"), - "body", "text"))); + assertAcked( + prepareCreate("stack").addMapping( + "doc", + addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "question", "answer"), "body", "text") + ) + ); List requests = new ArrayList<>(); - requests.add(createIndexRequest("stack", "question", "1", null, "body", "I'm using HTTPS + Basic authentication " - + "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?")); + requests.add( + createIndexRequest( + "stack", + "question", + "1", + null, + "body", + "I'm using HTTPS + Basic authentication " + + "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?" + ) + ); requests.add(createIndexRequest("stack", "answer", "3", "1", "body", "install fail2ban and enable rules for apache")); - requests.add(createIndexRequest("stack", "question", "2", null, "body", - "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?")); - requests.add(createIndexRequest("stack", "answer", "4", "2", "body", - "Denyhosts protects only ssh; Fail2Ban protects all daemons.")); + requests.add( + createIndexRequest( + "stack", + "question", + "2", + null, + "body", + "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?" + ) + ); + requests.add( + createIndexRequest("stack", "answer", "4", "2", "body", "Denyhosts protects only ssh; Fail2Ban protects all daemons.") + ); indexRandom(true, requests); SearchResponse response = client().prepareSearch("stack") .addSort("id", SortOrder.ASC) .setQuery( - boolQuery() - .must(matchQuery("body", "fail2ban")) + boolQuery().must(matchQuery("body", "fail2ban")) .must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder())) - ).get(); + ) + .get(); assertNoFailures(response); assertHitCount(response, 2); @@ -342,10 +390,18 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testParentChildMultipleLayers() throws Exception { - assertAcked(prepareCreate("articles") - .addMapping("doc", - addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - "article", "comment", "comment", "remark"), "title", "text", "message", "text"))); + assertAcked( + prepareCreate("articles").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment", "comment", "remark"), + "title", + "text", + "message", + "text" + ) + ) + ); List requests = new ArrayList<>(); requests.add(createIndexRequest("articles", "article", "1", null, "title", "quick brown fox")); @@ -357,9 +413,13 @@ public class InnerHitsIT extends ParentChildTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", - hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()), - ScoreMode.None).innerHit(new InnerHitBuilder())) + .setQuery( + hasChildQuery( + "comment", + hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()), + ScoreMode.None + ).innerHit(new InnerHitBuilder()) + ) .get(); assertNoFailures(response); @@ 
-378,9 +438,13 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(innerHits.getAt(0).getType(), equalTo("doc")); response = client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", - hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()), - ScoreMode.None).innerHit(new InnerHitBuilder())) + .setQuery( + hasChildQuery( + "comment", + hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()), + ScoreMode.None + ).innerHit(new InnerHitBuilder()) + ) .get(); assertNoFailures(response); @@ -400,9 +464,23 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testRoyals() throws Exception { - assertAcked(prepareCreate("royals") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - "king", "prince", "prince", "duke", "duke", "earl", "earl", "baron"))); + assertAcked( + prepareCreate("royals").addMapping( + "doc", + buildParentJoinFieldMappingFromSimplifiedDef( + "join_field", + true, + "king", + "prince", + "prince", + "duke", + "duke", + "earl", + "earl", + "baron" + ) + ) + ); List requests = new ArrayList<>(); requests.add(createIndexRequest("royals", "king", "king", null)); @@ -419,20 +497,25 @@ public class InnerHitsIT extends ParentChildTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("royals") - .setQuery(boolQuery() - .filter(hasParentQuery("prince", - hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings")), - false).innerHit(new InnerHitBuilder().setName("princes")) + .setQuery( + boolQuery().filter( + hasParentQuery( + "prince", + hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings")), + false + ).innerHit(new InnerHitBuilder().setName("princes")) ) - .filter(hasChildQuery("earl", - hasChildQuery("baron", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setName("barons")), - ScoreMode.None).innerHit(new InnerHitBuilder() - .addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC)) - .setName("earls") - .setSize(4)) - ) - ).get(); + .filter( + hasChildQuery( + "earl", + hasChildQuery("baron", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("barons")), + ScoreMode.None + ).innerHit( + new InnerHitBuilder().addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC)).setName("earls").setSize(4) + ) + ) + ) + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); @@ -469,8 +552,9 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testMatchesQueriesParentChildInnerHits() throws Exception { - assertAcked(prepareCreate("index") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); + assertAcked( + prepareCreate("index").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); List requests = new ArrayList<>(); requests.add(createIndexRequest("index", "parent", "1", null)); requests.add(createIndexRequest("index", "child", "3", "1", "field", "value1")); @@ -480,8 +564,9 @@ public class InnerHitsIT extends ParentChildTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("index") - .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None) - .innerHit(new InnerHitBuilder())) + .setQuery( + hasChildQuery("child", 
matchQuery("field", "value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder()) + ) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -495,12 +580,10 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); - QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None) - .innerHit(new InnerHitBuilder()); - response = client().prepareSearch("index") - .setQuery(query) - .addSort("id", SortOrder.ASC) - .get(); + QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None).innerHit( + new InnerHitBuilder() + ); + response = client().prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); @@ -509,9 +592,12 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testUseMaxDocInsteadOfSize() throws Exception { - assertAcked(prepareCreate("index1") - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); - client().admin().indices().prepareUpdateSettings("index1") + assertAcked( + prepareCreate("index1").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + ); + client().admin() + .indices() + .prepareUpdateSettings("index1") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)) .get(); List requests = new ArrayList<>(); @@ -519,26 +605,39 @@ public class InnerHitsIT extends ParentChildTestCase { requests.add(createIndexRequest("index1", "child", "2", "1", "field", "value1")); indexRandom(true, requests); - QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None) - .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)); - SearchResponse response = client().prepareSearch("index1") - .setQuery(query) - .get(); + QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None).innerHit( + new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1) + ); + SearchResponse response = client().prepareSearch("index1").setQuery(query).get(); assertNoFailures(response); assertHitCount(response, 1); } public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, - "parent_type", "child_type"), "nested_type", "nested"))); + assertAcked( + prepareCreate("test").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), + "nested_type", + "nested" + ) + ) + ); createIndexRequest("test", "parent_type", "1", null, "key", "value").get(); createIndexRequest("test", "child_type", "2", "1", "nested_type", Collections.singletonMap("key", "value")).get(); refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(boolQuery().must(matchQuery("key", "value")) - .should(hasChildQuery("child_type", nestedQuery("nested_type", 
matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder()), ScoreMode.None).innerHit(new InnerHitBuilder()))) + .setQuery( + boolQuery().must(matchQuery("key", "value")) + .should( + hasChildQuery( + "child_type", + nestedQuery("nested_type", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder()), + ScoreMode.None + ).innerHit(new InnerHitBuilder()) + ) + ) .get(); assertHitCount(response, 1); SearchHit hit = response.getHits().getAt(0); @@ -548,10 +647,15 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testInnerHitsWithIgnoreUnmapped() throws Exception { - assertAcked(prepareCreate("index1") - .addMapping("doc", addFieldMappings( - buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), - "nested_type", "nested")) + assertAcked( + prepareCreate("index1").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), + "nested_type", + "nested" + ) + ) ); assertAcked(prepareCreate("index2")); createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get(); @@ -560,10 +664,11 @@ public class InnerHitsIT extends ParentChildTestCase { refresh(); SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery(boolQuery() - .should(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true))) - .should(termQuery("key", "value")) + .setQuery( + boolQuery().should( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true)) + ).should(termQuery("key", "value")) ) .get(); assertNoFailures(response); @@ -572,46 +677,73 @@ public class InnerHitsIT extends ParentChildTestCase { } public void testTooHighResultWindow() throws Exception { - assertAcked(prepareCreate("index1") - .addMapping("doc", addFieldMappings( - buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), - "nested_type", "nested")) + assertAcked( + prepareCreate("index1").addMapping( + "doc", + addFieldMappings( + buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), + "nested_type", + "nested" + ) + ) ); createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get(); createIndexRequest("index1", "child_type", "2", "1").get(); refresh(); SearchResponse response = client().prepareSearch("index1") - .setQuery(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name"))) + .setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name")) + ) .get(); assertNoFailures(response); assertHitCount(response, 1); - Exception e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("index1") - .setQuery(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))) - .get()); - assertThat(e.getCause().getMessage(), - containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]")); - e = expectThrows(SearchPhaseExecutionException.class, () -> 
client().prepareSearch("index1") - .setQuery(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))) - .get()); - assertThat(e.getCause().getMessage(), - containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]")); + Exception e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("index1") + .setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) + ) + .get() + ); + assertThat( + e.getCause().getMessage(), + containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]") + ); + e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch("index1") + .setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) + ) + .get() + ); + assertThat( + e.getCause().getMessage(), + containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]") + ); - client().admin().indices().prepareUpdateSettings("index1") + client().admin() + .indices() + .prepareUpdateSettings("index1") .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110)) .get(); response = client().prepareSearch("index1") - .setQuery(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))) + .setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) + ) .get(); assertNoFailures(response); response = client().prepareSearch("index1") - .setQuery(hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))) + .setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) + ) .get(); assertNoFailures(response); } diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java index 8cd28149075..40c9b79c1ab 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java @@ -70,7 +70,8 @@ public abstract class ParentChildTestCase extends OpenSearchIntegTestCase { @Override public Settings indexSettings() { - Settings.Builder builder = Settings.builder().put(super.indexSettings()) + Settings.Builder builder = Settings.builder() + .put(super.indexSettings()) // aggressive filter caching so that we can assert on the filter cache size .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), true) .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true); @@ -87,23 +88,25 @@ public abstract class ParentChildTestCase extends OpenSearchIntegTestCase { return createIndexRequest(index, type, id, parentId, source); } - protected 
IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, - XContentBuilder builder) throws IOException { + protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, XContentBuilder builder) + throws IOException { Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false); return createIndexRequest(index, type, id, parentId, source); } - public static Map buildParentJoinFieldMappingFromSimplifiedDef(String joinFieldName, - boolean eagerGlobalOrdinals, - String... relations) { + public static Map buildParentJoinFieldMappingFromSimplifiedDef( + String joinFieldName, + boolean eagerGlobalOrdinals, + String... relations + ) { Map fields = new HashMap<>(); Map joinField = new HashMap<>(); joinField.put("type", "join"); joinField.put("eager_global_ordinals", eagerGlobalOrdinals); Map relationMap = new HashMap<>(); - for (int i = 0; i < relations.length; i+=2) { - String[] children = relations[i+1].split(","); + for (int i = 0; i < relations.length; i += 2) { + String[] children = relations[i + 1].split(","); if (children.length > 1) { relationMap.put(relations[i], children); } else { @@ -119,7 +122,7 @@ public abstract class ParentChildTestCase extends OpenSearchIntegTestCase { @SuppressWarnings("unchecked") public static Map addFieldMappings(Map map, String... fields) { Map propsMap = (Map) map.get("properties"); - for (int i = 0; i < fields.length; i+=2) { + for (int i = 0; i < fields.length; i += 2) { String field = fields[i]; String type = fields[i + 1]; propsMap.put(field, Collections.singletonMap("type", type)); diff --git a/modules/parent-join/src/main/java/org/opensearch/join/ParentJoinPlugin.java b/modules/parent-join/src/main/java/org/opensearch/join/ParentJoinPlugin.java index a81eb47e40f..6889e38c04f 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/ParentJoinPlugin.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/ParentJoinPlugin.java @@ -52,8 +52,7 @@ import java.util.Map; public class ParentJoinPlugin extends Plugin implements SearchPlugin, MapperPlugin { - public ParentJoinPlugin() { - } + public ParentJoinPlugin() {} @Override public List> getQueries() { diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Children.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Children.java index 48d6688c6fb..a4d73fda833 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Children.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Children.java @@ -37,5 +37,4 @@ import org.opensearch.search.aggregations.bucket.SingleBucketAggregation; /** * An single bucket aggregation that translates parent documents to their children documents. 
*/ -public interface Children extends SingleBucketAggregation { -} +public interface Children extends SingleBucketAggregation {} diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenAggregationBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenAggregationBuilder.java index 1b47fcb5469..bff730878d1 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenAggregationBuilder.java @@ -78,8 +78,7 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder metadata) { + protected ChildrenAggregationBuilder(ChildrenAggregationBuilder clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); this.childType = clone.childType; this.childFilter = clone.childFilter; @@ -115,12 +114,22 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder metadata) throws IOException { + public ChildrenAggregatorFactory( + String name, + ValuesSourceConfig config, + Query childFilter, + Query parentFilter, + QueryShardContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metadata); this.childFilter = childFilter; @@ -82,19 +84,33 @@ public class ChildrenAggregatorFactory extends ValuesSourceAggregatorFactory { } @Override - protected Aggregator doCreateInternal(SearchContext searchContext, Aggregator parent, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + protected Aggregator doCreateInternal( + SearchContext searchContext, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { ValuesSource rawValuesSource = config.getValuesSource(); if (rawValuesSource instanceof WithOrdinals == false) { - throw new AggregationExecutionException("ValuesSource type " + rawValuesSource.toString() + - "is not supported for aggregation " + this.name()); + throw new AggregationExecutionException( + "ValuesSource type " + rawValuesSource.toString() + "is not supported for aggregation " + this.name() + ); } WithOrdinals valuesSource = (WithOrdinals) rawValuesSource; long maxOrd = valuesSource.globalMaxOrd(searchContext.searcher()); - return new ParentToChildrenAggregator(name, factories, searchContext, parent, childFilter, - parentFilter, valuesSource, maxOrd, cardinality, metadata); + return new ParentToChildrenAggregator( + name, + factories, + searchContext, + parent, + childFilter, + parentFilter, + valuesSource, + maxOrd, + cardinality, + metadata + ); } @Override diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenToParentAggregator.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenToParentAggregator.java index 244f25d694b..d8fce3d72ed 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenToParentAggregator.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ChildrenToParentAggregator.java @@ -51,17 +51,32 @@ public class ChildrenToParentAggregator extends ParentJoinAggregator { static final ParseField TYPE_FIELD = new ParseField("type"); - public ChildrenToParentAggregator(String name, AggregatorFactories factories, - SearchContext context, Aggregator parent, Query childFilter, - Query parentFilter, 
ValuesSource.Bytes.WithOrdinals valuesSource, - long maxOrd, CardinalityUpperBound cardinality, Map metadata) throws IOException { + public ChildrenToParentAggregator( + String name, + AggregatorFactories factories, + SearchContext context, + Aggregator parent, + Query childFilter, + Query parentFilter, + ValuesSource.Bytes.WithOrdinals valuesSource, + long maxOrd, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, childFilter, parentFilter, valuesSource, maxOrd, cardinality, metadata); } @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalParent(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalParent( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Parent.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Parent.java index 46021a5111a..dcd53f23081 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Parent.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/Parent.java @@ -37,5 +37,4 @@ import org.opensearch.search.aggregations.bucket.SingleBucketAggregation; /** * An single bucket aggregation that translates child documents to their parent documents. */ -public interface Parent extends SingleBucketAggregation { -} +public interface Parent extends SingleBucketAggregation {} diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentAggregationBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentAggregationBuilder.java index 1ebabdf0dae..f180981db12 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentAggregationBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentAggregationBuilder.java @@ -78,8 +78,7 @@ public class ParentAggregationBuilder extends ValuesSourceAggregationBuilder metadata) { + protected ParentAggregationBuilder(ParentAggregationBuilder clone, Builder factoriesBuilder, Map metadata) { super(clone, factoriesBuilder, metadata); this.childType = clone.childType; this.childFilter = clone.childFilter; @@ -115,12 +114,22 @@ public class ParentAggregationBuilder extends ValuesSourceAggregationBuilder metadata) throws IOException { + public ParentAggregatorFactory( + String name, + ValuesSourceConfig config, + Query childFilter, + Query parentFilter, + QueryShardContext queryShardContext, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { super(name, config, queryShardContext, parent, subFactoriesBuilder, metadata); this.childFilter = childFilter; @@ -82,19 +84,33 @@ public class ParentAggregatorFactory extends ValuesSourceAggregatorFactory { } @Override - protected Aggregator doCreateInternal(SearchContext searchContext, Aggregator children, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + protected Aggregator doCreateInternal( + SearchContext searchContext, + Aggregator children, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { ValuesSource 
rawValuesSource = config.getValuesSource(); if (rawValuesSource instanceof WithOrdinals == false) { - throw new AggregationExecutionException("ValuesSource type " + rawValuesSource.toString() + - "is not supported for aggregation " + this.name()); + throw new AggregationExecutionException( + "ValuesSource type " + rawValuesSource.toString() + "is not supported for aggregation " + this.name() + ); } WithOrdinals valuesSource = (WithOrdinals) rawValuesSource; long maxOrd = valuesSource.globalMaxOrd(searchContext.searcher()); - return new ChildrenToParentAggregator(name, factories, searchContext, children, childFilter, - parentFilter, valuesSource, maxOrd, cardinality, metadata); + return new ChildrenToParentAggregator( + name, + factories, + searchContext, + children, + childFilter, + parentFilter, + valuesSource, + maxOrd, + cardinality, + metadata + ); } @Override diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentJoinAggregator.java index 7e362a992e9..4e1016a5968 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentJoinAggregator.java @@ -74,16 +74,18 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements */ private final CollectionStrategy collectionStrategy; - public ParentJoinAggregator(String name, - AggregatorFactories factories, - SearchContext context, - Aggregator parent, - Query inFilter, - Query outFilter, - ValuesSource.Bytes.WithOrdinals valuesSource, - long maxOrd, - CardinalityUpperBound cardinality, - Map metadata) throws IOException { + public ParentJoinAggregator( + String name, + AggregatorFactories factories, + SearchContext context, + Aggregator parent, + Query inFilter, + Query outFilter, + ValuesSource.Bytes.WithOrdinals valuesSource, + long maxOrd, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { /* * We have to use MANY to work around * https://github.com/elastic/elasticsearch/issues/59097 @@ -91,8 +93,9 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements super(name, factories, context, parent, CardinalityUpperBound.MANY, metadata); if (maxOrd > Integer.MAX_VALUE) { - throw new IllegalStateException("the number of parent [" + maxOrd + "] + is greater than the allowed limit " + - "for this aggregation: " + Integer.MAX_VALUE); + throw new IllegalStateException( + "the number of parent [" + maxOrd + "] + is greater than the allowed limit " + "for this aggregation: " + Integer.MAX_VALUE + ); } // these two filters are cached in the parser @@ -106,8 +109,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements } @Override - public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { if (valuesSource == null) { return LeafBucketCollector.NO_OP_COLLECTOR; } @@ -173,7 +175,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements * structure that maps a primitive long to a list of primitive * longs. 
*/ - for (long owningBucketOrd: ordsToCollect) { + for (long owningBucketOrd : ordsToCollect) { if (collectionStrategy.exists(owningBucketOrd, globalOrdinal)) { collectBucket(sub, docId, owningBucketOrd); } @@ -196,6 +198,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements */ protected interface CollectionStrategy extends Releasable { void add(long owningBucketOrd, int globalOrdinal); + boolean exists(long owningBucketOrd, int globalOrdinal); } diff --git a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentToChildrenAggregator.java index 34e2dd02865..8d342eada4d 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentToChildrenAggregator.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/aggregations/ParentToChildrenAggregator.java @@ -47,17 +47,32 @@ public class ParentToChildrenAggregator extends ParentJoinAggregator { static final ParseField TYPE_FIELD = new ParseField("type"); - public ParentToChildrenAggregator(String name, AggregatorFactories factories, - SearchContext context, Aggregator parent, Query childFilter, - Query parentFilter, ValuesSource.Bytes.WithOrdinals valuesSource, - long maxOrd, CardinalityUpperBound cardinality, Map metadata) throws IOException { + public ParentToChildrenAggregator( + String name, + AggregatorFactories factories, + SearchContext context, + Aggregator parent, + Query childFilter, + Query parentFilter, + ValuesSource.Bytes.WithOrdinals valuesSource, + long maxOrd, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { super(name, factories, context, parent, parentFilter, childFilter, valuesSource, maxOrd, cardinality, metadata); } @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - return buildAggregationsForSingleBucket(owningBucketOrds, (owningBucketOrd, subAggregationResults) -> - new InternalChildren(name, bucketDocCount(owningBucketOrd), subAggregationResults, metadata())); + return buildAggregationsForSingleBucket( + owningBucketOrds, + (owningBucketOrd, subAggregationResults) -> new InternalChildren( + name, + bucketDocCount(owningBucketOrd), + subAggregationResults, + metadata() + ) + ); } @Override diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java index e52a310a4bc..4570a67d67e 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/MetaJoinFieldMapper.java @@ -149,8 +149,7 @@ public class MetaJoinFieldMapper extends FieldMapper { } @Override - protected void mergeOptions(FieldMapper other, List conflicts) { - } + protected void mergeOptions(FieldMapper other, List conflicts) {} @Override protected void parseCreateField(ParseContext context) throws IOException { diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java index 6a87377c87e..4e3d26d73c3 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentIdFieldMapper.java @@ -103,8 +103,13 @@ public final class ParentIdFieldMapper 
extends FieldMapper { @Override public ParentIdFieldMapper build(BuilderContext context) { - return new ParentIdFieldMapper(name, parent, children, fieldType, - new ParentIdFieldType(buildFullName(context), eagerGlobalOrdinals, meta)); + return new ParentIdFieldMapper( + name, + parent, + children, + fieldType, + new ParentIdFieldType(buildFullName(context), eagerGlobalOrdinals, meta) + ); } } @@ -144,11 +149,13 @@ public final class ParentIdFieldMapper extends FieldMapper { private final String parentName; private Set children; - protected ParentIdFieldMapper(String simpleName, - String parentName, - Set children, - FieldType fieldType, - MappedFieldType mappedFieldType) { + protected ParentIdFieldMapper( + String simpleName, + String parentName, + Set children, + FieldType fieldType, + MappedFieldType mappedFieldType + ) { super(simpleName, fieldType, mappedFieldType, MultiFields.empty(), CopyTo.empty()); this.parentName = parentName; this.children = children; @@ -169,6 +176,7 @@ public final class ParentIdFieldMapper extends FieldMapper { public Query getParentFilter() { return new TermQuery(new Term(name().substring(0, name().indexOf('#')), parentName)); } + /** * Returns the children names associated with this mapper. */ diff --git a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java index 2823575a1e7..7d34e6d40a7 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/mapper/ParentJoinFieldMapper.java @@ -98,8 +98,9 @@ public final class ParentJoinFieldMapper extends FieldMapper { * if there is no parent-join field in this mapping. 
*/ public static ParentJoinFieldMapper getMapper(MapperService service) { - MetaJoinFieldMapper.MetaJoinFieldType fieldType = - (MetaJoinFieldMapper.MetaJoinFieldType) service.fieldType(MetaJoinFieldMapper.NAME); + MetaJoinFieldMapper.MetaJoinFieldType fieldType = (MetaJoinFieldMapper.MetaJoinFieldType) service.fieldType( + MetaJoinFieldMapper.NAME + ); if (fieldType == null) { return null; } @@ -115,15 +116,17 @@ public final class ParentJoinFieldMapper extends FieldMapper { private static void checkIndexCompatibility(IndexSettings settings, String name) { if (settings.getIndexMetadata().isRoutingPartitionedIndex()) { - throw new IllegalStateException("cannot create join field [" + name + "] " + - "for the partitioned index " + "[" + settings.getIndex().getName() + "]"); + throw new IllegalStateException( + "cannot create join field [" + name + "] " + "for the partitioned index " + "[" + settings.getIndex().getName() + "]" + ); } } private static void checkObjectOrNested(ContentPath path, String name) { if (path.pathAsText(name).contains(".")) { - throw new IllegalArgumentException("join field [" + path.pathAsText(name) + "] " + - "cannot be added inside an object or in a multi-field"); + throw new IllegalArgumentException( + "join field [" + path.pathAsText(name) + "] " + "cannot be added inside an object or in a multi-field" + ); } } @@ -166,18 +169,22 @@ public final class ParentJoinFieldMapper extends FieldMapper { public ParentJoinFieldMapper build(BuilderContext context) { checkObjectOrNested(context.path(), name); final List parentIdFields = new ArrayList<>(); - parentIdFieldBuilders.stream() - .map((parentBuilder) -> { - if (eagerGlobalOrdinals) { - parentBuilder.eagerGlobalOrdinals(true); - } - return parentBuilder.build(context); - }) - .forEach(parentIdFields::add); + parentIdFieldBuilders.stream().map((parentBuilder) -> { + if (eagerGlobalOrdinals) { + parentBuilder.eagerGlobalOrdinals(true); + } + return parentBuilder.build(context); + }).forEach(parentIdFields::add); checkParentFields(name(), parentIdFields); MetaJoinFieldMapper unique = new MetaJoinFieldMapper.Builder(name).build(context); - return new ParentJoinFieldMapper(name, fieldType, new JoinFieldType(buildFullName(context), meta), - unique, Collections.unmodifiableList(parentIdFields), eagerGlobalOrdinals); + return new ParentJoinFieldMapper( + name, + fieldType, + new JoinFieldType(buildFullName(context), meta), + unique, + Collections.unmodifiableList(parentIdFields), + eagerGlobalOrdinals + ); } } @@ -200,7 +207,7 @@ public final class ParentJoinFieldMapper extends FieldMapper { } if ("relations".equals(entry.getKey())) { Map relations = XContentMapValues.nodeMapValue(entry.getValue(), "relations"); - for (Iterator> relIt = relations.entrySet().iterator(); relIt.hasNext(); ) { + for (Iterator> relIt = relations.entrySet().iterator(); relIt.hasNext();) { Map.Entry relation = relIt.next(); final String parent = relation.getKey(); Set children; @@ -255,12 +262,14 @@ public final class ParentJoinFieldMapper extends FieldMapper { private List parentIdFields; private boolean eagerGlobalOrdinals; - protected ParentJoinFieldMapper(String simpleName, - FieldType fieldType, - MappedFieldType mappedFieldType, - MetaJoinFieldMapper uniqueFieldMapper, - List parentIdFields, - boolean eagerGlobalOrdinals) { + protected ParentJoinFieldMapper( + String simpleName, + FieldType fieldType, + MappedFieldType mappedFieldType, + MetaJoinFieldMapper uniqueFieldMapper, + List parentIdFields, + boolean eagerGlobalOrdinals + ) { 
super(simpleName, fieldType, mappedFieldType, MultiFields.empty(), CopyTo.empty()); this.parentIdFields = parentIdFields; this.uniqueFieldMapper = uniqueFieldMapper; @@ -284,7 +293,7 @@ public final class ParentJoinFieldMapper extends FieldMapper { @Override public Iterator iterator() { - List mappers = new ArrayList<> (parentIdFields); + List mappers = new ArrayList<>(parentIdFields); mappers.add(uniqueFieldMapper); return mappers.iterator(); } @@ -332,12 +341,12 @@ public final class ParentJoinFieldMapper extends FieldMapper { if (self == null) { if (getParentIdFieldMapper(mergeWithMapper.getParentName(), false) != null) { // it is forbidden to add a parent to an existing child - conflicts.add("cannot create parent [" + mergeWithMapper.getParentName() + "] from an existing child"); + conflicts.add("cannot create parent [" + mergeWithMapper.getParentName() + "] from an existing child"); } for (String child : mergeWithMapper.getChildren()) { if (getParentIdFieldMapper(child, true) != null) { // it is forbidden to add a parent to an existing child - conflicts.add("cannot create child [" + child + "] from an existing parent"); + conflicts.add("cannot create child [" + child + "] from an existing parent"); } } newParentIdFields.add(mergeWithMapper); @@ -392,7 +401,7 @@ public final class ParentJoinFieldMapper extends FieldMapper { name = context.parser().text(); parent = null; } else { - throw new IllegalStateException("[" + name + "] expected START_OBJECT or VALUE_STRING but was: " + token); + throw new IllegalStateException("[" + name + "] expected START_OBJECT or VALUE_STRING but was: " + token); } ParentIdFieldMapper parentIdField = getParentIdFieldMapper(name, true); diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java index e8f2da76ee4..2e7411743b1 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java @@ -89,8 +89,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder children = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(query, children); - InnerHitContextBuilder innerHitContextBuilder = - new ParentChildInnerHitContextBuilder(type, true, query, innerHitBuilder, children); + InnerHitContextBuilder innerHitContextBuilder = new ParentChildInnerHitContextBuilder( + type, + true, + query, + innerHitBuilder, + children + ); innerHits.put(name, innerHitContextBuilder); } } diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasParentQueryBuilder.java index a54cf8401f9..32cb8cf8174 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasParentQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasParentQueryBuilder.java @@ -89,7 +89,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder children = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(query, children); - InnerHitContextBuilder innerHitContextBuilder = - new ParentChildInnerHitContextBuilder(type, false, query, innerHitBuilder, children); + InnerHitContextBuilder innerHitContextBuilder = new ParentChildInnerHitContextBuilder( + type, + false, + query, + innerHitBuilder, + children + ); innerHits.put(name, innerHitContextBuilder); } } diff --git 
a/modules/parent-join/src/main/java/org/opensearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/ParentChildInnerHitContextBuilder.java index 4b9c73d497e..61114cba3fa 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/ParentChildInnerHitContextBuilder.java @@ -72,8 +72,13 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { private final String typeName; private final boolean fetchChildInnerHits; - ParentChildInnerHitContextBuilder(String typeName, boolean fetchChildInnerHits, QueryBuilder query, - InnerHitBuilder innerHitBuilder, Map children) { + ParentChildInnerHitContextBuilder( + String typeName, + boolean fetchChildInnerHits, + QueryBuilder query, + InnerHitBuilder innerHitBuilder, + Map children + ) { super(query, innerHitBuilder, children); this.typeName = typeName; this.fetchChildInnerHits = fetchChildInnerHits; @@ -85,8 +90,13 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.mapperService()); if (joinFieldMapper != null) { String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : typeName; - JoinFieldInnerHitSubContext joinFieldInnerHits = new JoinFieldInnerHitSubContext(name, context, typeName, - fetchChildInnerHits, joinFieldMapper); + JoinFieldInnerHitSubContext joinFieldInnerHits = new JoinFieldInnerHitSubContext( + name, + context, + typeName, + fetchChildInnerHits, + joinFieldMapper + ); setupInnerHitsContext(queryShardContext, joinFieldInnerHits); innerHitsContext.addInnerHitDefinition(joinFieldInnerHits); } else { @@ -101,8 +111,13 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { private final boolean fetchChildInnerHits; private final ParentJoinFieldMapper joinFieldMapper; - JoinFieldInnerHitSubContext(String name, SearchContext context, String typeName, boolean fetchChildInnerHits, - ParentJoinFieldMapper joinFieldMapper) { + JoinFieldInnerHitSubContext( + String name, + SearchContext context, + String typeName, + boolean fetchChildInnerHits, + ParentJoinFieldMapper joinFieldMapper + ) { super(name, context); this.typeName = typeName; this.fetchChildInnerHits = fetchChildInnerHits; @@ -118,8 +133,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { } QueryShardContext qsc = context.getQueryShardContext(); - ParentIdFieldMapper parentIdFieldMapper = - joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false); + ParentIdFieldMapper parentIdFieldMapper = joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false); if (parentIdFieldMapper == null) { return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN); } @@ -148,10 +162,9 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx); } return new TopDocsAndMaxScore( - new TopDocs( - new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO), - Lucene.EMPTY_SCORE_DOCS - ), Float.NaN); + new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), + Float.NaN + ); } else { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); TopDocsCollector topDocsCollector; diff --git 
a/modules/parent-join/src/main/java/org/opensearch/join/query/ParentIdQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/ParentIdQueryBuilder.java index 240d7372495..8964cdefc8e 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/ParentIdQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/ParentIdQueryBuilder.java @@ -166,12 +166,12 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder> expectedParentChildRelations = setupIndex(indexWriter); indexWriter.close(); - IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), - new ShardId(new Index("foo", "_na_"), 1)); + IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1)); // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved IndexSearcher indexSearcher = newSearcher(indexReader, false, true); @@ -143,13 +142,17 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), - indexSearcher, aggregation -> { - assertEquals("Expected one result for min-aggregation for parent: " + parent + - ", but had aggregation-results: " + aggregation, - 1, aggregation.getDocCount()); - assertEquals(expectedParentChildRelations.get(parent).v2(), - ((InternalMin) aggregation.getAggregations().get("in_parent")).getValue(), Double.MIN_VALUE); + testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexSearcher, aggregation -> { + assertEquals( + "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, + 1, + aggregation.getDocCount() + ); + assertEquals( + expectedParentChildRelations.get(parent).v2(), + ((InternalMin) aggregation.getAggregations().get("in_parent")).getValue(), + Double.MIN_VALUE + ); }); } @@ -157,7 +160,6 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { directory.close(); } - public void testParentChildTerms() throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); @@ -168,13 +170,13 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { SortedMap entries = new TreeMap<>(); for (Tuple value : expectedParentChildRelations.values()) { Long l = entries.computeIfAbsent(value.v2(), integer -> 0L); - entries.put(value.v2(), l+1); + entries.put(value.v2(), l + 1); } List> sortedValues = new ArrayList<>(entries.entrySet()); sortedValues.sort((o1, o2) -> { // sort larger values first int ret = o2.getValue().compareTo(o1.getValue()); - if(ret != 0) { + if (ret != 0) { return ret; } @@ -182,8 +184,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { return o1.getKey().compareTo(o2.getKey()); }); - IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), - new ShardId(new Index("foo", "_na_"), 1)); + IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1)); // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved IndexSearcher indexSearcher = newSearcher(indexReader, false, true); @@ -201,7 +202,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { for 
(Map.Entry entry : sortedValues) { LongTerms.Bucket bucket = valueTermsBuckets.get(i); assertEquals(entry.getKey().longValue(), bucket.getKeyAsNumber()); - assertEquals(entry.getValue(), (Long)bucket.getDocCount()); + assertEquals(entry.getValue(), (Long) bucket.getDocCount()); i++; } @@ -221,11 +222,10 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { SortedMap sortedValues = new TreeMap<>(); for (Tuple value : expectedParentChildRelations.values()) { Long l = sortedValues.computeIfAbsent(value.v2(), integer -> 0L); - sortedValues.put(value.v2(), l+1); + sortedValues.put(value.v2(), l + 1); } - IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), - new ShardId(new Index("foo", "_na_"), 1)); + IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1)); // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved IndexSearcher indexSearcher = newSearcher(indexReader, false, true); @@ -267,18 +267,18 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { private static List createParentDocument(String id, int value) { return Arrays.asList( - new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Field.Store.NO), - new StringField("join_field", PARENT_TYPE, Field.Store.NO), - createJoinField(PARENT_TYPE, id), - new SortedNumericDocValuesField("number", value) + new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Field.Store.NO), + new StringField("join_field", PARENT_TYPE, Field.Store.NO), + createJoinField(PARENT_TYPE, id), + new SortedNumericDocValuesField("number", value) ); } private static List createChildDocument(String childId, String parentId, int value) { return Arrays.asList( - new StringField(IdFieldMapper.NAME, Uid.encodeId(childId), Field.Store.NO), - new StringField("join_field", CHILD_TYPE, Field.Store.NO), - createJoinField(PARENT_TYPE, parentId), + new StringField(IdFieldMapper.NAME, Uid.encodeId(childId), Field.Store.NO), + new StringField("join_field", CHILD_TYPE, Field.Store.NO), + createJoinField(PARENT_TYPE, parentId), new SortedNumericDocValuesField("subNumber", value) ); } @@ -294,8 +294,13 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class); when(metaJoinFieldType.getJoinField()).thenReturn("join_field"); when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType); - MappingLookup fieldMappers = new MappingLookup(Collections.singleton(joinFieldMapper), - Collections.emptyList(), Collections.emptyList(), 0, null); + MappingLookup fieldMappers = new MappingLookup( + Collections.singleton(joinFieldMapper), + Collections.emptyList(), + Collections.emptyList(), + 0, + null + ); DocumentMapper mockMapper = mock(DocumentMapper.class); when(mockMapper.mappers()).thenReturn(fieldMappers); when(mapperService.documentMapper()).thenReturn(mockMapper); @@ -304,13 +309,11 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { private static ParentJoinFieldMapper createJoinFieldMapper() { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); - return new ParentJoinFieldMapper.Builder("join_field") - .addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) - .build(new Mapper.BuilderContext(settings, new ContentPath(0))); + return new 
ParentJoinFieldMapper.Builder("join_field").addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) + .build(new Mapper.BuilderContext(settings, new ContentPath(0))); } - private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) - throws IOException { + private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { ParentAggregationBuilder aggregationBuilder = new ParentAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new MinAggregationBuilder("in_parent").field("number")); @@ -320,8 +323,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { verify.accept(result); } - private void testCaseTerms(Query query, IndexSearcher indexSearcher, Consumer verify) - throws IOException { + private void testCaseTerms(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { ParentAggregationBuilder aggregationBuilder = new ParentAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new TermsAggregationBuilder("value_terms").field("number")); @@ -332,12 +334,13 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { } // run a terms aggregation on the number in child-documents, then a parent aggregation and then terms on the parent-number - private void testCaseTermsParentTerms(Query query, IndexSearcher indexSearcher, Consumer verify) - throws IOException { - AggregationBuilder aggregationBuilder = - new TermsAggregationBuilder("subvalue_terms").field("subNumber"). - subAggregation(new ParentAggregationBuilder("to_parent", CHILD_TYPE). - subAggregation(new TermsAggregationBuilder("value_terms").field("number"))); + private void testCaseTermsParentTerms(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { + AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("subvalue_terms").field("subNumber") + .subAggregation( + new ParentAggregationBuilder("to_parent", CHILD_TYPE).subAggregation( + new TermsAggregationBuilder("value_terms").field("number") + ) + ); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG); MappedFieldType subFieldType = new NumberFieldMapper.NumberFieldType("subNumber", NumberFieldMapper.NumberType.LONG); diff --git a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalChildrenTests.java b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalChildrenTests.java index 21eb0e272e2..8eaedc4aa15 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalChildrenTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalChildrenTests.java @@ -56,14 +56,23 @@ public class InternalChildrenTests extends InternalSingleBucketAggregationTestCa @Override protected List getNamedXContents() { List extendedNamedXContents = new ArrayList<>(super.getNamedXContents()); - extendedNamedXContents.add(new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(ChildrenAggregationBuilder.NAME), - (p, c) -> ParsedChildren.fromXContent(p, (String) c))); - return extendedNamedXContents ; + extendedNamedXContents.add( + new NamedXContentRegistry.Entry( + Aggregation.class, + new ParseField(ChildrenAggregationBuilder.NAME), + (p, c) -> ParsedChildren.fromXContent(p, (String) c) + ) + ); + return extendedNamedXContents; } @Override - protected InternalChildren createTestInstance(String name, long docCount, InternalAggregations 
aggregations, - Map metadata) { + protected InternalChildren createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalChildren(name, docCount, aggregations, metadata); } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalParentTests.java b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalParentTests.java index 8ee41fc40ab..cd5236ab49a 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalParentTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/InternalParentTests.java @@ -55,14 +55,19 @@ public class InternalParentTests extends InternalSingleBucketAggregationTestCase @Override protected List getNamedXContents() { List extendedNamedXContents = new ArrayList<>(super.getNamedXContents()); - extendedNamedXContents.add(new Entry(Aggregation.class, new ParseField(ParentAggregationBuilder.NAME), - (p, c) -> ParsedParent.fromXContent(p, (String) c))); - return extendedNamedXContents ; + extendedNamedXContents.add( + new Entry(Aggregation.class, new ParseField(ParentAggregationBuilder.NAME), (p, c) -> ParsedParent.fromXContent(p, (String) c)) + ); + return extendedNamedXContents; } @Override - protected InternalParent createTestInstance(String name, long docCount, InternalAggregations aggregations, - Map metadata) { + protected InternalParent createTestInstance( + String name, + long docCount, + InternalAggregations aggregations, + Map metadata + ) { return new InternalParent(name, docCount, aggregations, metadata); } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java index 373b13fb9da..4373a74b172 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -100,8 +100,11 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { testCase(new MatchAllDocsQuery(), newSearcher(indexReader, false, true), parentToChild -> { assertEquals(0, parentToChild.getDocCount()); - assertEquals(Double.POSITIVE_INFINITY, ((InternalMin) parentToChild.getAggregations().get("in_child")).getValue(), - Double.MIN_VALUE); + assertEquals( + Double.POSITIVE_INFINITY, + ((InternalMin) parentToChild.getAggregations().get("in_child")).getValue(), + Double.MIN_VALUE + ); }); indexReader.close(); directory.close(); @@ -114,8 +117,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { final Map> expectedParentChildRelations = setupIndex(indexWriter); indexWriter.close(); - IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), - new ShardId(new Index("foo", "_na_"), 1)); + IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory), new ShardId(new Index("foo", "_na_"), 1)); // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved IndexSearcher indexSearcher = newSearcher(indexReader, false, true); @@ -134,8 +136,11 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { for (String parent : expectedParentChildRelations.keySet()) { testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexSearcher, child -> { assertEquals((long) 
expectedParentChildRelations.get(parent).v1(), child.getDocCount()); - assertEquals(expectedParentChildRelations.get(parent).v2(), - ((InternalMin) child.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE); + assertEquals( + expectedParentChildRelations.get(parent).v2(), + ((InternalMin) child.getAggregations().get("in_child")).getValue(), + Double.MIN_VALUE + ); }); } indexReader.close(); @@ -177,8 +182,13 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { expectedOddMin = Math.min(expectedOddMin, e.getValue().v2()); } } - StringTerms result = - searchAndReduce(indexSearcher, new MatchAllDocsQuery(), request, longField("number"), keywordField("kwd")); + StringTerms result = searchAndReduce( + indexSearcher, + new MatchAllDocsQuery(), + request, + longField("number"), + keywordField("kwd") + ); StringTerms.Bucket evenBucket = result.getBucketByKey("even"); InternalChildren evenChildren = evenBucket.getAggregations().get("children"); @@ -219,19 +229,19 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { private static List createParentDocument(String id, String kwd) { return Arrays.asList( - new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Field.Store.NO), - new SortedSetDocValuesField("kwd", new BytesRef(kwd)), - new StringField("join_field", PARENT_TYPE, Field.Store.NO), - createJoinField(PARENT_TYPE, id) + new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Field.Store.NO), + new SortedSetDocValuesField("kwd", new BytesRef(kwd)), + new StringField("join_field", PARENT_TYPE, Field.Store.NO), + createJoinField(PARENT_TYPE, id) ); } private static List createChildDocument(String childId, String parentId, int value) { return Arrays.asList( - new StringField(IdFieldMapper.NAME, Uid.encodeId(childId), Field.Store.NO), - new StringField("join_field", CHILD_TYPE, Field.Store.NO), - createJoinField(PARENT_TYPE, parentId), - new SortedNumericDocValuesField("number", value) + new StringField(IdFieldMapper.NAME, Uid.encodeId(childId), Field.Store.NO), + new StringField("join_field", CHILD_TYPE, Field.Store.NO), + createJoinField(PARENT_TYPE, parentId), + new SortedNumericDocValuesField("number", value) ); } @@ -246,8 +256,13 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { MetaJoinFieldMapper.MetaJoinFieldType metaJoinFieldType = mock(MetaJoinFieldMapper.MetaJoinFieldType.class); when(metaJoinFieldType.getJoinField()).thenReturn("join_field"); when(mapperService.fieldType("_parent_join")).thenReturn(metaJoinFieldType); - MappingLookup fieldMappers = new MappingLookup(Collections.singleton(joinFieldMapper), - Collections.emptyList(), Collections.emptyList(), 0, null); + MappingLookup fieldMappers = new MappingLookup( + Collections.singleton(joinFieldMapper), + Collections.emptyList(), + Collections.emptyList(), + 0, + null + ); DocumentMapper mockMapper = mock(DocumentMapper.class); when(mockMapper.mappers()).thenReturn(fieldMappers); when(mapperService.documentMapper()).thenReturn(mockMapper); @@ -256,13 +271,11 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { private static ParentJoinFieldMapper createJoinFieldMapper() { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); - return new ParentJoinFieldMapper.Builder("join_field") - .addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) - .build(new Mapper.BuilderContext(settings, new ContentPath(0))); + return new 
ParentJoinFieldMapper.Builder("join_field").addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE)) + .build(new Mapper.BuilderContext(settings, new ContentPath(0))); } - private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) - throws IOException { + private void testCase(Query query, IndexSearcher indexSearcher, Consumer verify) throws IOException { ChildrenAggregationBuilder aggregationBuilder = new ChildrenAggregationBuilder("_name", CHILD_TYPE); aggregationBuilder.subAggregation(new MinAggregationBuilder("in_child").field("number")); diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java index 116ae84eaa1..62040b3893e 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java @@ -60,262 +60,421 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase { } public void testSingleLevel() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties") + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() + .field("type", "join") + .startObject("relations") + .field("parent", "child") .endObject() - .endObject() - .endObject()); + .endObject() + .endObject() + .endObject() + ); IndexService service = createIndex("test"); - DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), - MapperService.MergeReason.MAPPING_UPDATE); + DocumentMapper docMapper = service.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(service.mapperService())); // Doc without join - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); + ParsedDocument doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "0", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), + XContentType.JSON + ) + ); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent - doc = docMapper.parse(new SourceToParse("test", "type", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field("join_field", "parent") - .endObject()), XContentType.JSON)); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), + XContentType.JSON + ) + ); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child - doc = docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "child") - .field("parent", "1") - .endObject() - .endObject()), XContentType.JSON, "1")); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + 
BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "child") + .field("parent", "1") + .endObject() + .endObject() + ), + XContentType.JSON, + "1" + ) + ); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Unknown join name - MapperException exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", "type", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field("join_field", "unknown") - .endObject()), XContentType.JSON))); + MapperException exc = expectThrows( + MapperParsingException.class, + () -> docMapper.parse( + new SourceToParse( + "test", + "type", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), + XContentType.JSON + ) + ) + ); assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]")); } public void testParentIdSpecifiedAsNumber() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties") + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() + .field("type", "join") + .startObject("relations") + .field("parent", "child") .endObject() - .endObject()); + .endObject() + .endObject() + .endObject() + ); IndexService service = createIndex("test"); - DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), - MapperService.MergeReason.MAPPING_UPDATE); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "child") - .field("parent", 1) - .endObject() - .endObject()), XContentType.JSON, "1")); + DocumentMapper docMapper = service.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + ParsedDocument doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "child") + .field("parent", 1) + .endObject() + .endObject() + ), + XContentType.JSON, + "1" + ) + ); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); - doc = docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "child") - .field("parent", 1.0) - .endObject() - .endObject()), XContentType.JSON, "1")); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "child") + .field("parent", 1.0) + .endObject() + .endObject() + ), + XContentType.JSON, + "1" + ) + ); assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); } public void testMultipleLevels() throws Exception { - 
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("properties") + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child") - .endObject() + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child") .endObject() - .endObject() - .endObject()); + .endObject() + .endObject() + .endObject() + ); IndexService service = createIndex("test"); - DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), - MapperService.MergeReason.MAPPING_UPDATE); + DocumentMapper docMapper = service.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(service.mapperService())); // Doc without join - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); + ParsedDocument doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "0", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), + XContentType.JSON + ) + ); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent - doc = docMapper.parse(new SourceToParse("test", "type", "1", - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject() - .field("join_field", "parent") - .endObject()), XContentType.JSON)); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), + XContentType.JSON + ) + ); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child - doc = docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "child") - .field("parent", "1") - .endObject() - .endObject()), XContentType.JSON, "1")); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "child") + .field("parent", "1") + .endObject() + .endObject() + ), + XContentType.JSON, + "1" + ) + ); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child missing parent - MapperException exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field("join_field", "child") - .endObject()), XContentType.JSON, "1"))); + MapperException exc = expectThrows( + MapperParsingException.class, + () -> docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "child").endObject()), + XContentType.JSON, + "1" + ) + ) + ); 
assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]")); // Doc child missing routing - exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", "type", "2", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "child") - .field("parent", "1") - .endObject() - .endObject()), XContentType.JSON))); + exc = expectThrows( + MapperParsingException.class, + () -> docMapper.parse( + new SourceToParse( + "test", + "type", + "2", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "child") + .field("parent", "1") + .endObject() + .endObject() + ), + XContentType.JSON + ) + ) + ); assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]")); // Doc grand_child - doc = docMapper.parse(new SourceToParse("test", "type", "3", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .startObject("join_field") - .field("name", "grand_child") - .field("parent", "2") - .endObject() - .endObject()), XContentType.JSON, "1")); + doc = docMapper.parse( + new SourceToParse( + "test", + "type", + "3", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .startObject("join_field") + .field("name", "grand_child") + .field("parent", "2") + .endObject() + .endObject() + ), + XContentType.JSON, + "1" + ) + ); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Unknown join name - exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", "type", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field("join_field", "unknown") - .endObject()), XContentType.JSON))); + exc = expectThrows( + MapperParsingException.class, + () -> docMapper.parse( + new SourceToParse( + "test", + "type", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), + XContentType.JSON + ) + ) + ); assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]")); } public void testUpdateRelations() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", "child") - .array("child", "grand_child1", "grand_child2") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") .endObject() - .endObject() - .endObject().endObject()); + .endObject() + .endObject() + .endObject() + ); IndexService indexService = createIndex("test"); - DocumentMapper docMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), - MapperService.MergeReason.MAPPING_UPDATE); + DocumentMapper docMapper = indexService.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); { - final String updateMapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .array("child", "grand_child1", "grand_child2") + .array("child", "grand_child1", "grand_child2") .endObject() - .endObject() - .endObject().endObject()); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE)); + .endObject() + .endObject() + .endObject() + ); + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> indexService.mapperService() + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) + ); assertThat(exc.getMessage(), containsString("cannot remove parent [parent] in join field [join_field]")); } { - final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", "child") - .field("child", "grand_child1") + .field("parent", "child") + .field("child", "grand_child1") .endObject() - .endObject() - .endObject().endObject()); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE)); + .endObject() + .endObject() + .endObject() + ); + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> indexService.mapperService() + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) + ); assertThat(exc.getMessage(), containsString("cannot remove child [grand_child2] in join field [join_field]")); } { - final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("uber_parent", "parent") - .field("parent", "child") - .array("child", "grand_child1", "grand_child2") + .field("uber_parent", "parent") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") .endObject() - .endObject() - .endObject().endObject()); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE)); + .endObject() + .endObject() + .endObject() + ); + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> indexService.mapperService() + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) + ); assertThat(exc.getMessage(), containsString("cannot create child [parent] from an existing parent")); } { - final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - 
.startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", "child") - .array("child", "grand_child1", "grand_child2") - .field("grand_child2", "grand_grand_child") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") + .field("grand_child2", "grand_grand_child") .endObject() - .endObject() - .endObject().endObject()); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE)); + .endObject() + .endObject() + .endObject() + ); + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> indexService.mapperService() + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) + ); assertThat(exc.getMessage(), containsString("cannot create parent [grand_child2] from an existing child]")); } { - final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .array("parent", "child", "child2") - .array("child", "grand_child1", "grand_child2") + .array("parent", "child", "child2") + .array("child", "grand_child1", "grand_child2") .endObject() - .endObject() - .endObject().endObject()); - docMapper = indexService.mapperService().merge("_doc", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE); + .endObject() + .endObject() + .endObject() + ); + docMapper = indexService.mapperService() + .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); assertNotNull(mapper); assertEquals("join_field", mapper.name()); @@ -326,18 +485,23 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase { } { - final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") - .startObject("join_field") + final String updateMapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") .field("type", "join") .startObject("relations") - .array("parent", "child", "child2") - .array("child", "grand_child1", "grand_child2") - .array("other", "child_other1", "child_other2") + .array("parent", "child", "child2") + .array("child", "grand_child1", "grand_child2") + .array("other", "child_other1", "child_other2") .endObject() - .endObject() - .endObject().endObject()); - docMapper = indexService.mapperService().merge("_doc", new CompressedXContent(updateMapping), - MapperService.MergeReason.MAPPING_UPDATE); + .endObject() + .endObject() + .endObject() + ); + docMapper = indexService.mapperService() + .merge("_doc", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); assertNotNull(mapper); assertEquals("join_field", mapper.name()); @@ -351,115 +515,147 @@ public class 
ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
 }
 public void testInvalidJoinFieldInsideObject() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
- .startObject("object")
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
 .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
+ .startObject("object")
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
 .endObject()
- .endObject()
- .endObject().endObject());
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ );
 IndexService indexService = createIndex("test");
- MapperParsingException exc = expectThrows(MapperParsingException.class,
- () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
- MapperService.MergeReason.MAPPING_UPDATE));
- assertThat(exc.getRootCause().getMessage(),
- containsString("join field [object.join_field] cannot be added inside an object or in a multi-field"));
+ MapperParsingException exc = expectThrows(
+ MapperParsingException.class,
+ () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)
+ );
+ assertThat(
+ exc.getRootCause().getMessage(),
+ containsString("join field [object.join_field] cannot be added inside an object or in a multi-field")
+ );
 }
 public void testInvalidJoinFieldInsideMultiFields() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
- .startObject("number")
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("number")
 .field("type", "integer")
 .startObject("fields")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
 .endObject()
- .endObject()
- .endObject().endObject());
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ );
 IndexService indexService = createIndex("test");
- MapperParsingException exc = expectThrows(MapperParsingException.class,
- () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
- MapperService.MergeReason.MAPPING_UPDATE));
- assertThat(exc.getRootCause().getMessage(),
- containsString("join field [number.join_field] cannot be added inside an object or in a multi-field"));
+ MapperParsingException exc = expectThrows(
+ MapperParsingException.class,
+ () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)
+ );
+ assertThat(
+ exc.getRootCause().getMessage(),
+ containsString("join field [number.join_field] cannot be added inside an object or in a multi-field")
+ );
 }
 public void testMultipleJoinFields() throws Exception {
 IndexService indexService = createIndex("test");
 {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("properties")
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
 .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
+ .endObject()
 .endObject()
 .startObject("another_join_field")
- .field("type", "join")
- .startObject("relations")
- .field("product", "item")
- .endObject()
+ .field("type", "join")
+ .startObject("relations")
+ .field("product", "item")
 .endObject()
- .endObject()
- .endObject());
- MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type",
- new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
+ .endObject()
+ .endObject()
+ .endObject()
+ );
+ MapperParsingException exc = expectThrows(
+ MapperParsingException.class,
+ () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)
+ );
 assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
 }
 {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("properties")
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
 .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
 .endObject()
- .endObject()
- .endObject());
- indexService.mapperService().merge("type",
- new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
- String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("properties")
+ .endObject()
+ .endObject()
+ .endObject()
+ );
+ indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+ String updateMapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
 .startObject("another_join_field")
- .field("type", "join")
+ .field("type", "join")
 .endObject()
- .endObject()
- .endObject());
- MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type",
- new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
+ .endObject()
+ .endObject()
+ );
+ MapperParsingException exc = expectThrows(
+ MapperParsingException.class,
+ () -> indexService.mapperService()
+ .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE)
+ );
 assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
 }
 }
 public void testEagerGlobalOrdinals() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("properties")
+ String mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
 .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
 .endObject()
- .endObject()
- .endObject());
+ .endObject()
+ .endObject()
+ .endObject()
+ );
 IndexService service = createIndex("test");
- DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
- MapperService.MergeReason.MAPPING_UPDATE);
+ DocumentMapper docMapper = service.mapperService()
+ .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
 assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(service.mapperService()));
 assertFalse(service.mapperService().fieldType("join_field").eagerGlobalOrdinals());
 assertNotNull(service.mapperService().fieldType("join_field#parent"));
@@ -467,20 +663,22 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
 assertNotNull(service.mapperService().fieldType("join_field#child"));
 assertTrue(service.mapperService().fieldType("join_field#child").eagerGlobalOrdinals());
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("properties")
+ mapping = Strings.toString(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
 .startObject("join_field")
- .field("type", "join")
- .field("eager_global_ordinals", false)
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
+ .field("type", "join")
+ .field("eager_global_ordinals", false)
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
 .endObject()
- .endObject()
- .endObject());
- service.mapperService().merge("type", new CompressedXContent(mapping),
- MapperService.MergeReason.MAPPING_UPDATE);
+ .endObject()
+ .endObject()
+ .endObject()
+ );
+ service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
 assertFalse(service.mapperService().fieldType("join_field").eagerGlobalOrdinals());
 assertNotNull(service.mapperService().fieldType("join_field#parent"));
 assertFalse(service.mapperService().fieldType("join_field#parent").eagerGlobalOrdinals());
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
index 131a2b910f2..e069fc23a14 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
@@ -105,42 +105,45 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase hasChildQuery(null, query, ScoreMode.None));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery(null, query, ScoreMode.None));
 assertEquals("[has_child] requires 'type' field", e.getMessage());
 e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery("foo", null, ScoreMode.None));
@@ -231,44 +237,43 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase failingQueryBuilder.toQuery(createShardContext()));
- assertThat(e.getMessage(), containsString("[" + HasChildQueryBuilder.NAME +
- "] join field [join_field] doesn't hold [unmapped] as a child"));
+ assertThat(
+ e.getMessage(),
+ containsString("[" + HasChildQueryBuilder.NAME + "] join field [join_field] doesn't hold [unmapped] as a child")
+ );
 }
 public void testIgnoreUnmappedWithRewrite() throws IOException {
 // WrapperQueryBuilder makes sure we always rewrite
- final HasChildQueryBuilder queryBuilder
- = new HasChildQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), ScoreMode.None);
+ final HasChildQueryBuilder queryBuilder = new HasChildQueryBuilder(
"unmapped", + new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), + ScoreMode.None + ); queryBuilder.ignoreUnmapped(true); QueryShardContext queryShardContext = createShardContext(); Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext); @@ -375,11 +386,16 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null)) + ); assertEquals("[inner_hits] already contains an entry for key [some_name]", e.getMessage()); } @@ -387,11 +403,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); - assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", - e.getMessage()); + HasChildQueryBuilder queryBuilder = hasChildQuery(CHILD_DOC, new TermQueryBuilder("custom_string", "value"), ScoreMode.None); + OpenSearchException e = expectThrows(OpenSearchException.class, () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getMessage()); } } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java index c2166e4c4f8..9783cb703ad 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java @@ -88,38 +88,41 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase hasParentQuery(null, query, false)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> hasParentQuery(null, query, false)); assertThat(e.getMessage(), equalTo("[has_parent] requires 'parent_type' field")); - e = expectThrows(IllegalArgumentException.class, - () -> hasParentQuery("foo", null, false)); + e = expectThrows(IllegalArgumentException.class, () -> hasParentQuery("foo", null, false)); assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field")); QueryShardContext context = createShardContext(); @@ -190,13 +192,12 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase failingQueryBuilder.toQuery(createShardContext())); - assertThat(e.getMessage(), - containsString("[has_parent] join field [join_field] doesn't hold [unmapped] as a parent")); + assertThat(e.getMessage(), containsString("[has_parent] join field [join_field] doesn't hold [unmapped] as a parent")); } public void testIgnoreUnmappedWithRewrite() throws IOException { // WrapperQueryBuilder makes sure we always rewrite - final HasParentQueryBuilder queryBuilder = - new HasParentQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false); + final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder( + "unmapped", + new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), + false + ); queryBuilder.ignoreUnmapped(true); QueryShardContext queryShardContext = createShardContext(); Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext); @@ -268,11 +270,16 @@ public class HasParentQueryBuilderTests extends 
AbstractQueryTestCase InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null)) + ); assertEquals("[inner_hits] already contains an entry for key [some_name]", e.getMessage()); } @@ -281,10 +288,11 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); - assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", - e.getMessage()); + CHILD_DOC, + new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), + false + ); + OpenSearchException e = expectThrows(OpenSearchException.class, () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getMessage()); } } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java index 8ca09631708..41bc717db1f 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java @@ -77,38 +77,41 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); - assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", - e.getMessage()); + OpenSearchException e = expectThrows(OpenSearchException.class, () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[joining] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getMessage()); } } diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index e94a66fcb9a..c111590d7a2 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -97,43 +97,41 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorQuery() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject() - .field("id", "1") - .field("query", matchAllQuery()).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchAllQuery()).endObject()) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject() - .field("id", "2") - .field("query", matchQuery("field1", "value")).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) + .get(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", "3") - 
.field("query", boolQuery() - .must(matchQuery("field1", "value")) - .must(matchQuery("field2", "value")) - ).endObject()).get(); + .field("query", boolQuery().must(matchQuery("field1", "value")).must(matchQuery("field2", "value"))) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject()); logger.info("percolating empty doc"); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + SearchResponse response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -143,9 +141,9 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -156,12 +154,18 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", Arrays.asList( + .setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) - ), XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); + ), + XContentType.JSON + ) + ) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); @@ -172,143 +176,179 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorRangeQueries() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type", "field1", "type=long", "field2", "type=double", "field3", "type=ip", "field4", "type=date", - "query", "type=percolator") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") + .addMapping( + "type", + "field1", + "type=long", + "field2", + "type=double", + "field3", + "type=ip", 
+ "field4", + "type=date", + "query", + "type=percolator" + ) ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) + .get(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(rangeQuery("field1").from(10).to(12)) - .must(rangeQuery("field1").from(12).to(14)) - ).endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14))) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test", "type", "4") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) + .get(); client().prepareIndex("test", "type", "5") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) + .get(); client().prepareIndex("test", "type", "6") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(rangeQuery("field2").from(10).to(12)) - .must(rangeQuery("field2").from(12).to(14)) - ).endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field("query", boolQuery().must(rangeQuery("field2").from(10).to(12)).must(rangeQuery("field2").from(12).to(14))) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test", "type", "7") - .setSource(jsonBuilder().startObject() - .field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) - .endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")).endObject()) + .get(); client().prepareIndex("test", "type", "8") - .setSource(jsonBuilder().startObject() - .field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")) - .endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")).endObject()) + .get(); client().prepareIndex("test", "type", "9") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) - .must(rangeQuery("field3").from("192.168.1.5").to("192.168.1.10")) - ).endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field( + "query", + boolQuery().must(rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) + .must(rangeQuery("field3").from("192.168.1.5").to("192.168.1.10")) + ) + .endObject() + ) + .get(); client().prepareIndex("test", "type", "10") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(rangeQuery("field4").from("2010-01-01").to("2018-01-01")) - .must(rangeQuery("field4").from("2010-01-01").to("now")) - ).endObject()).get(); + 
.setSource( + jsonBuilder().startObject() + .field( + "query", + boolQuery().must(rangeQuery("field4").from("2010-01-01").to("2018-01-01")) + .must(rangeQuery("field4").from("2010-01-01").to("now")) + ) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); // Test long range: BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject()); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + SearchResponse response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); logger.info("response={}", response); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); // Test double range: source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(1).getId(), equalTo("4")); source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("4")); // Test IP range: source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("9")); assertThat(response.getHits().getAt(1).getId(), equalTo("7")); source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("7")); // Test date range: source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); 
assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("10")); } public void testPercolatorGeoQueries() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type", "id", "type=keyword", - "field1", "type=geo_point", "field2", "type=geo_shape", "query", "type=percolator")); + assertAcked( + client().admin() + .indices() + .prepareCreate("test") + .addMapping( + "type", + "id", + "type=keyword", + "field1", + "type=geo_point", + "field2", + "type=geo_shape", + "query", + "type=percolator" + ) + ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject() - .field("query", geoDistanceQuery("field1").point(52.18, 4.38).distance(50, DistanceUnit.KILOMETERS)) - .field("id", "1") - .endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field("query", geoDistanceQuery("field1").point(52.18, 4.38).distance(50, DistanceUnit.KILOMETERS)) + .field("id", "1") + .endObject() + ) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject() - .field("query", geoBoundingBoxQuery("field1").setCorners(52.3, 4.4, 52.1, 4.6)) - .field("id", "2") - .endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field("query", geoBoundingBoxQuery("field1").setCorners(52.3, 4.4, 52.1, 4.6)) + .field("id", "2") + .endObject() + ) + .get(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject() - .field("query", - geoPolygonQuery("field1", Arrays.asList(new GeoPoint(52.1, 4.4), new GeoPoint(52.3, 4.5), new GeoPoint(52.1, 4.6)))) - .field("id", "3") - .endObject()).get(); + .setSource( + jsonBuilder().startObject() + .field( + "query", + geoPolygonQuery("field1", Arrays.asList(new GeoPoint(52.1, 4.4), new GeoPoint(52.3, 4.5), new GeoPoint(52.1, 4.6))) + ) + .field("id", "3") + .endObject() + ) + .get(); refresh(); - BytesReference source = BytesReference.bytes(jsonBuilder().startObject() - .startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject() - .endObject()); + BytesReference source = BytesReference.bytes( + jsonBuilder().startObject().startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject().endObject() + ); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) @@ -320,26 +360,27 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorQueryExistingDocument() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject() - .field("id", "1") - .field("query", matchAllQuery()).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchAllQuery()).endObject()) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject() - .field("id", "2") - .field("query", matchQuery("field1", "value")).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "value")).endObject()) + .get(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", "3") - .field("query", 
boolQuery() - .must(matchQuery("field1", "value")) - .must(matchQuery("field2", "value"))).endObject()).get(); + .field("query", boolQuery().must(matchQuery("field1", "value")).must(matchQuery("field2", "value"))) + .endObject() + ) + .get(); client().prepareIndex("test", "type", "4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); client().prepareIndex("test", "type", "5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); @@ -348,25 +389,25 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)) - .get(); + .setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)) + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null)) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null)) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); @@ -374,97 +415,110 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type", "_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") + .addMapping("type", "_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .get(); + client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); client().prepareIndex("test", "type", "2").setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc with source disabled"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)) - .get(); - }); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> { client().prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)).get(); } + ); assertThat(e.getMessage(), containsString("source disabled")); } - public void testPercolatorSpecificQueries() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") + public void 
testPercolatorSpecificQueries() throws Exception { + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject() - .field("id", 1) - .field("query", commonTermsQuery("field1", "quick brown fox")).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("id", 1).field("query", commonTermsQuery("field1", "quick brown fox")).endObject()) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", 2) - .field("query", multiMatchQuery("quick brown fox", "field1", "field2") - .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) - .endObject()) - .get(); + .field("query", multiMatchQuery("quick brown fox", "field1", "field2").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) + .endObject() + ) + .get(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", 3) - .field("query", - spanNearQuery(spanTermQuery("field1", "quick"), 0) - .addClause(spanTermQuery("field1", "brown")) - .addClause(spanTermQuery("field1", "fox")) - .inOrder(true)) - .endObject()) - .get(); + .field( + "query", + spanNearQuery(spanTermQuery("field1", "quick"), 0).addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) + .inOrder(true) + ) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test", "type", "4") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", 4) - .field("query", + .field( + "query", spanNotQuery( - spanNearQuery(spanTermQuery("field1", "quick"), 0) - .addClause(spanTermQuery("field1", "brown")) - .addClause(spanTermQuery("field1", "fox")) - .inOrder(true), - spanNearQuery(spanTermQuery("field1", "the"), 0) - .addClause(spanTermQuery("field1", "lazy")) - .addClause(spanTermQuery("field1", "dog")) - .inOrder(true)).dist(2)) - .endObject()) - .get(); + spanNearQuery(spanTermQuery("field1", "quick"), 0).addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) + .inOrder(true), + spanNearQuery(spanTermQuery("field1", "the"), 0).addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) + .inOrder(true) + ).dist(2) + ) + .endObject() + ) + .get(); // doesn't match client().prepareIndex("test", "type", "5") - .setSource(jsonBuilder().startObject() + .setSource( + jsonBuilder().startObject() .field("id", 5) - .field("query", + .field( + "query", spanNotQuery( - spanNearQuery(spanTermQuery("field1", "quick"), 0) - .addClause(spanTermQuery("field1", "brown")) - .addClause(spanTermQuery("field1", "fox")) - .inOrder(true), - spanNearQuery(spanTermQuery("field1", "the"), 0) - .addClause(spanTermQuery("field1", "lazy")) - .addClause(spanTermQuery("field1", "dog")) - .inOrder(true)).dist(3)) - .endObject()) - .get(); + spanNearQuery(spanTermQuery("field1", "quick"), 0).addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) + .inOrder(true), + spanNearQuery(spanTermQuery("field1", "the"), 0).addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) + .inOrder(true) + ).dist(3) + ) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); - BytesReference 
source = BytesReference.bytes(jsonBuilder().startObject() + BytesReference source = BytesReference.bytes( + jsonBuilder().startObject() .field("field1", "the quick brown fox jumps over the lazy dog") .field("field2", "the quick brown fox falls down into the well") - .endObject()); + .endObject() + ); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 4); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getScore(), equalTo(Float.NaN)); @@ -477,74 +531,79 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorQueryWithHighlighting() throws Exception { - StringBuilder fieldMapping = new StringBuilder("type=text") - .append(",store=").append(randomBoolean()); + StringBuilder fieldMapping = new StringBuilder("type=text").append(",store=").append(randomBoolean()); if (randomBoolean()) { fieldMapping.append(",term_vector=with_positions_offsets"); } else if (randomBoolean()) { fieldMapping.append(",index_options=offsets"); } - assertAcked(client().admin().indices().prepareCreate("test") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("type", "id", "type=keyword", "field1", fieldMapping, "query", "type=percolator") ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject() - .field("id", "1") - .field("query", matchQuery("field1", "brown fox")).endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "brown fox")).endObject()) + .execute() + .actionGet(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject() - .field("id", "2") - .field("query", matchQuery("field1", "lazy dog")).endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject().field("id", "2").field("query", matchQuery("field1", "lazy dog")).endObject()) + .execute() + .actionGet(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject() - .field("id", "3") - .field("query", termQuery("field1", "jumps")).endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject().field("id", "3").field("query", termQuery("field1", "jumps")).endObject()) + .execute() + .actionGet(); client().prepareIndex("test", "type", "4") - .setSource(jsonBuilder().startObject() - .field("id", "4") - .field("query", termQuery("field1", "dog")).endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject().field("id", "4").field("query", termQuery("field1", "dog")).endObject()) + .execute() + .actionGet(); client().prepareIndex("test", "type", "5") - .setSource(jsonBuilder().startObject() - .field("id", "5") - .field("query", termQuery("field1", "fox")).endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject().field("id", "5").field("query", termQuery("field1", "fox")).endObject()) + .execute() + .actionGet(); client().admin().indices().prepareRefresh().get(); - BytesReference document = BytesReference.bytes(jsonBuilder().startObject() - .field("field1", "The quick brown fox jumps over the lazy dog") - .endObject()); + BytesReference document = BytesReference.bytes( + jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the 
lazy dog").endObject() + ); SearchResponse searchResponse = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) - .highlighter(new HighlightBuilder().field("field1")) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) + .highlighter(new HighlightBuilder().field("field1")) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("field1").fragments()[0].string(), - equalTo("The quick brown fox jumps over the lazy dog")); - assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("field1").fragments()[0].string(), - equalTo("The quick brown fox jumps over the lazy dog")); - assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("field1").fragments()[0].string(), - equalTo("The quick brown fox jumps over the lazy dog")); - assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("field1").fragments()[0].string(), - equalTo("The quick brown fox jumps over the lazy dog")); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), - equalTo("The quick brown fox jumps over the lazy dog")); + assertThat( + searchResponse.getHits().getAt(0).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(1).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(2).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(3).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), + equalTo("The quick brown fox jumps over the lazy dog") + ); - BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject() - .field("field1", "The quick brown fox jumps") - .endObject()); - BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject() - .field("field1", "over the lazy dog") - .endObject()); + BytesReference document1 = BytesReference.bytes( + jsonBuilder().startObject().field("field1", "The quick brown fox jumps").endObject() + ); + BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject().field("field1", "over the lazy dog").endObject()); searchResponse = client().prepareSearch() - .setQuery(boolQuery() - .should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) - .should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2")) + .setQuery( + boolQuery().should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) + .should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2")) ) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) @@ -552,157 +611,213 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query1_field1").fragments()[0].string(), - 
equalTo("The quick brown fox jumps")); - assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("query2_field1").fragments()[0].string(), - equalTo("over the lazy dog")); - assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("query1_field1").fragments()[0].string(), - equalTo("The quick brown fox jumps")); - assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("query2_field1").fragments()[0].string(), - equalTo("over the lazy dog")); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query1_field1").fragments()[0].string(), - equalTo("The quick brown fox jumps")); + assertThat( + searchResponse.getHits().getAt(0).getHighlightFields().get("query1_field1").fragments()[0].string(), + equalTo("The quick brown fox jumps") + ); + assertThat( + searchResponse.getHits().getAt(1).getHighlightFields().get("query2_field1").fragments()[0].string(), + equalTo("over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(2).getHighlightFields().get("query1_field1").fragments()[0].string(), + equalTo("The quick brown fox jumps") + ); + assertThat( + searchResponse.getHits().getAt(3).getHighlightFields().get("query2_field1").fragments()[0].string(), + equalTo("over the lazy dog") + ); + assertThat( + searchResponse.getHits().getAt(4).getHighlightFields().get("query1_field1").fragments()[0].string(), + equalTo("The quick brown fox jumps") + ); searchResponse = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", Arrays.asList( + .setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) - ), XContentType.JSON)) - .highlighter(new HighlightBuilder().field("field1")) - .addSort("id", SortOrder.ASC) - .get(); + ), + XContentType.JSON + ) + ) + .highlighter(new HighlightBuilder().field("field1")) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), - equalTo(Arrays.asList(1, 3))); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("1_field1").fragments()[0].string(), - equalTo("fox")); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("3_field1").fragments()[0].string(), - equalTo("brown fox")); - assertThat(searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot").getValues(), - equalTo(Collections.singletonList(0))); - assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("0_field1").fragments()[0].string(), - equalTo("dog")); - assertThat(searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot").getValues(), - equalTo(Collections.singletonList(2))); - assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("2_field1").fragments()[0].string(), - equalTo("jumps")); - assertThat(searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot").getValues(), - equalTo(Collections.singletonList(0))); - assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("0_field1").fragments()[0].string(), - equalTo("dog")); - 
assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot").getValues(), - equalTo(Arrays.asList(1, 3))); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("1_field1").fragments()[0].string(), - equalTo("fox")); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("3_field1").fragments()[0].string(), - equalTo("brown fox")); + assertThat( + searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), + equalTo(Arrays.asList(1, 3)) + ); + assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("1_field1").fragments()[0].string(), equalTo("fox")); + assertThat( + searchResponse.getHits().getAt(0).getHighlightFields().get("3_field1").fragments()[0].string(), + equalTo("brown fox") + ); + assertThat( + searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot").getValues(), + equalTo(Collections.singletonList(0)) + ); + assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("0_field1").fragments()[0].string(), equalTo("dog")); + assertThat( + searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot").getValues(), + equalTo(Collections.singletonList(2)) + ); + assertThat( + searchResponse.getHits().getAt(2).getHighlightFields().get("2_field1").fragments()[0].string(), + equalTo("jumps") + ); + assertThat( + searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot").getValues(), + equalTo(Collections.singletonList(0)) + ); + assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("0_field1").fragments()[0].string(), equalTo("dog")); + assertThat( + searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot").getValues(), + equalTo(Arrays.asList(1, 3)) + ); + assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("1_field1").fragments()[0].string(), equalTo("fox")); + assertThat( + searchResponse.getHits().getAt(4).getHighlightFields().get("3_field1").fragments()[0].string(), + equalTo("brown fox") + ); searchResponse = client().prepareSearch() - .setQuery(boolQuery() - .should(new PercolateQueryBuilder("query", Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) - ), XContentType.JSON).setName("query1")) - .should(new PercolateQueryBuilder("query", Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) - ), XContentType.JSON).setName("query2")) + .setQuery( + boolQuery().should( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) + ), + XContentType.JSON + ).setName("query1") + ) + .should( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) + ), + XContentType.JSON + ).setName("query2") + ) ) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); - 
assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query1").getValues(), - equalTo(Collections.singletonList(1))); - assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query2").getValues(), - equalTo(Collections.singletonList(1))); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query1_1_field1").fragments()[0].string(), - equalTo("fox")); - assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query2_1_field1").fragments()[0].string(), - equalTo("brown fox")); + assertThat( + searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query1").getValues(), + equalTo(Collections.singletonList(1)) + ); + assertThat( + searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query2").getValues(), + equalTo(Collections.singletonList(1)) + ); + assertThat( + searchResponse.getHits().getAt(0).getHighlightFields().get("query1_1_field1").fragments()[0].string(), + equalTo("fox") + ); + assertThat( + searchResponse.getHits().getAt(0).getHighlightFields().get("query2_1_field1").fragments()[0].string(), + equalTo("brown fox") + ); - assertThat(searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot_query1").getValues(), - equalTo(Collections.singletonList(0))); - assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("query1_0_field1").fragments()[0].string(), - equalTo("dog")); + assertThat( + searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot_query1").getValues(), + equalTo(Collections.singletonList(0)) + ); + assertThat( + searchResponse.getHits().getAt(1).getHighlightFields().get("query1_0_field1").fragments()[0].string(), + equalTo("dog") + ); - assertThat(searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot_query2").getValues(), - equalTo(Collections.singletonList(0))); - assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("query2_0_field1").fragments()[0].string(), - equalTo("jumps")); + assertThat( + searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot_query2").getValues(), + equalTo(Collections.singletonList(0)) + ); + assertThat( + searchResponse.getHits().getAt(2).getHighlightFields().get("query2_0_field1").fragments()[0].string(), + equalTo("jumps") + ); - assertThat(searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot_query1").getValues(), - equalTo(Collections.singletonList(0))); - assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("query1_0_field1").fragments()[0].string(), - equalTo("dog")); + assertThat( + searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot_query1").getValues(), + equalTo(Collections.singletonList(0)) + ); + assertThat( + searchResponse.getHits().getAt(3).getHighlightFields().get("query1_0_field1").fragments()[0].string(), + equalTo("dog") + ); - assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query1").getValues(), - equalTo(Collections.singletonList(1))); - assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query2").getValues(), - equalTo(Collections.singletonList(1))); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query1_1_field1").fragments()[0].string(), - equalTo("fox")); - assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query2_1_field1").fragments()[0].string(), - equalTo("brown fox")); + 
assertThat( + searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query1").getValues(), + equalTo(Collections.singletonList(1)) + ); + assertThat( + searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query2").getValues(), + equalTo(Collections.singletonList(1)) + ); + assertThat( + searchResponse.getHits().getAt(4).getHighlightFields().get("query1_1_field1").fragments()[0].string(), + equalTo("fox") + ); + assertThat( + searchResponse.getHits().getAt(4).getHighlightFields().get("query2_1_field1").fragments()[0].string(), + equalTo("brown fox") + ); } public void testTakePositionOffsetGapIntoAccount() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("type", "field", "type=text,position_increment_gap=5", "query", "type=percolator") ); client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("query", - new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) + .get(); client().prepareIndex("test", "type", "2") - .setSource(jsonBuilder().startObject().field("query", - new MatchPhraseQueryBuilder("field", "brown fox").slop(5)).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(5)).endObject()) + .get(); client().admin().indices().prepareRefresh().get(); - SearchResponse response = client().prepareSearch().setQuery( - new PercolateQueryBuilder("query", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"), XContentType.JSON) - ).get(); + SearchResponse response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"), XContentType.JSON)) + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } - public void testManyPercolatorFields() throws Exception { String queryFieldName = randomAlphaOfLength(8); - assertAcked(client().admin().indices().prepareCreate("test1") - .addMapping("type", queryFieldName, "type=percolator", "field", "type=keyword") + assertAcked( + client().admin().indices().prepareCreate("test1").addMapping("type", queryFieldName, "type=percolator", "field", "type=keyword") ); - assertAcked(client().admin().indices().prepareCreate("test2") - .addMapping("type", queryFieldName, "type=percolator", "second_query_field", "type=percolator", "field", "type=keyword") + assertAcked( + client().admin() + .indices() + .prepareCreate("test2") + .addMapping("type", queryFieldName, "type=percolator", "second_query_field", "type=percolator", "field", "type=keyword") ); - assertAcked(client().admin().indices().prepareCreate("test3") - .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("field") - .field("type", "keyword") - .endObject() - .startObject("object_field") - .field("type", "object") - .startObject("properties") - .startObject(queryFieldName) - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject()) - ); - } - - public void testWithMultiplePercolatorFields() throws Exception { - String queryFieldName = randomAlphaOfLength(8); - assertAcked(client().admin().indices().prepareCreate("test1") - .addMapping("type", queryFieldName, 
"type=percolator", "field", "type=keyword")); - assertAcked(client().admin().indices().prepareCreate("test2") - .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties") + assertAcked( + client().admin() + .indices() + .prepareCreate("test3") + .addMapping( + "type", + jsonBuilder().startObject() + .startObject("type") + .startObject("properties") .startObject("field") .field("type", "keyword") .endObject() @@ -715,34 +830,72 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { .endObject() .endObject() .endObject() - .endObject().endObject()) + .endObject() + .endObject() + ) + ); + } + + public void testWithMultiplePercolatorFields() throws Exception { + String queryFieldName = randomAlphaOfLength(8); + assertAcked( + client().admin().indices().prepareCreate("test1").addMapping("type", queryFieldName, "type=percolator", "field", "type=keyword") + ); + assertAcked( + client().admin() + .indices() + .prepareCreate("test2") + .addMapping( + "type", + jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .endObject() + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject(queryFieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ) ); // Acceptable: client().prepareIndex("test1", "type", "1") - .setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) - .get(); + .setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) + .get(); client().prepareIndex("test2", "type", "1") - .setSource(jsonBuilder().startObject().startObject("object_field") - .field(queryFieldName, matchQuery("field", "value")) - .endObject().endObject()) - .get(); + .setSource( + jsonBuilder().startObject() + .startObject("object_field") + .field(queryFieldName, matchQuery("field", "value")) + .endObject() + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject()); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) - .setIndices("test1") - .get(); + .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) + .setIndices("test1") + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getType(), equalTo("type")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1")); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("object_field." + queryFieldName, source, XContentType.JSON)) - .setIndices("test2") - .get(); + .setQuery(new PercolateQueryBuilder("object_field." 
+ queryFieldName, source, XContentType.JSON)) + .setIndices("test2") + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getType(), equalTo("type")); @@ -751,11 +904,19 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { // Unacceptable: MapperParsingException e = expectThrows(MapperParsingException.class, () -> { client().prepareIndex("test2", "type", "1") - .setSource(jsonBuilder().startObject().startArray("object_field") - .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() - .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() - .endArray().endObject()) - .get(); + .setSource( + jsonBuilder().startObject() + .startArray("object_field") + .startObject() + .field(queryFieldName, matchQuery("field", "value")) + .endObject() + .startObject() + .field(queryFieldName, matchQuery("field", "value")) + .endObject() + .endArray() + .endObject() + ) + .get(); }); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); @@ -763,94 +924,173 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { public void testPercolateQueryWithNestedDocuments() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject().startObject("properties").startObject("query").field("type", "percolator").endObject() - .startObject("id").field("type", "keyword").endObject() - .startObject("companyname").field("type", "text").endObject().startObject("employee").field("type", "nested") - .startObject("properties").startObject("name").field("type", "text").endObject().endObject().endObject().endObject() - .endObject(); - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("employee", mapping) - ); - client().prepareIndex("test", "employee", "q1").setSource(jsonBuilder().startObject() - .field("id", "q1") - .field("query", QueryBuilders.nestedQuery("employee", - QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg) - ).endObject()) - .get(); + mapping.startObject() + .startObject("properties") + .startObject("query") + .field("type", "percolator") + .endObject() + .startObject("id") + .field("type", "keyword") + .endObject() + .startObject("companyname") + .field("type", "text") + .endObject() + .startObject("employee") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); + client().prepareIndex("test", "employee", "q1") + .setSource( + jsonBuilder().startObject() + .field("id", "q1") + .field( + "query", + QueryBuilders.nestedQuery( + "employee", + QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), + ScoreMode.Avg + ) + ) + .endObject() + ) + .get(); // this query should never match as it doesn't use nested query: - client().prepareIndex("test", "employee", "q2").setSource(jsonBuilder().startObject() - .field("id", "q2") - .field("query", QueryBuilders.matchQuery("employee.name", "virginia")).endObject()) - .get(); + client().prepareIndex("test", "employee", "q2") + .setSource( + jsonBuilder().startObject() + .field("id", "q2") + .field("query", 
QueryBuilders.matchQuery("employee.name", "virginia")) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", "employee", "q3").setSource(jsonBuilder().startObject() - .field("id", "q3") - .field("query", QueryBuilders.matchAllQuery()).endObject()) + client().prepareIndex("test", "employee", "q3") + .setSource(jsonBuilder().startObject().field("id", "q3").field("query", QueryBuilders.matchAllQuery()).endObject()) .get(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "stark") - .startArray("employee") - .startObject().field("name", "virginia potts").endObject() - .startObject().field("name", "tony stark").endObject() - .endArray() - .endObject()), XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "virginia potts") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ) + .addSort("id", SortOrder.ASC) + .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); assertThat(response.getHits().getAt(1).getId(), equalTo("q3")); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "notstark") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "notstark") + .startArray("employee") + .startObject() + .field("name", "virginia stark") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ) + .addSort("id", SortOrder.ASC) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); + + response = client().prepareSearch() + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), + XContentType.JSON + ) + ) + .addSort("id", SortOrder.ASC) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); + + response = client().prepareSearch() + .setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") .startArray("employee") - .startObject().field("name", "virginia stark").endObject() - .startObject().field("name", "tony stark").endObject() + .startObject() + .field("name", "virginia potts") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() .endArray() - .endObject()), XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); - - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), - XContentType.JSON)) - .addSort("id", SortOrder.ASC) 
- .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); - - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "stark") - .startArray("employee") - .startObject().field("name", "virginia potts").endObject() - .startObject().field("name", "tony stark").endObject() - .endArray() - .endObject()), - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "stark") - .startArray("employee") - .startObject().field("name", "peter parker").endObject() - .startObject().field("name", "virginia potts").endObject() - .endArray() - .endObject()), - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "stark") - .startArray("employee") - .startObject().field("name", "peter parker").endObject() - .endArray() - .endObject()) - ), XContentType.JSON)) + .endObject() + ), + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "peter parker") + .endObject() + .startObject() + .field("name", "virginia potts") + .endObject() + .endArray() + .endObject() + ), + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "peter parker") + .endObject() + .endArray() + .endObject() + ) + ), + XContentType.JSON + ) + ) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -861,47 +1101,80 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { } public void testPercolatorQueryViaMultiSearch() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type", "field1", "type=text", "query", "type=percolator") - ); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "field1", "type=text", "query", "type=percolator")); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) - .execute().actionGet(); + .execute() + .actionGet(); client().prepareIndex("test", "type", "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) - .execute().actionGet(); + .execute() + .actionGet(); client().prepareIndex("test", "type", "3") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(matchQuery("field1", "b")) - .must(matchQuery("field1", "c")) - ).endObject()) - .execute().actionGet(); + .setSource( + jsonBuilder().startObject() + .field("query", boolQuery().must(matchQuery("field1", "b")).must(matchQuery("field1", "c"))) + .endObject() + ) + .execute() + .actionGet(); client().prepareIndex("test", "type", "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); + .execute() + .actionGet(); client().prepareIndex("test", "type", "5") .setSource(jsonBuilder().startObject().field("field1", "c").endObject()) - .execute().actionGet(); + .execute() + .actionGet(); client().admin().indices().prepareRefresh().get(); MultiSearchResponse response = client().prepareMultiSearch() - .add(client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), 
XContentType.JSON))) - .add(client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML))) - .add(client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()), XContentType.JSON))) - .add(client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON))) - .add(client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) - .add(client().prepareSearch("test") // non existing doc, so error element - .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null))) + .add( + client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), + XContentType.JSON + ) + ) + ) + .add( + client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), + XContentType.YAML + ) + ) + ) + .add( + client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()), + XContentType.JSON + ) + ) + ) + .add( + client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), + XContentType.JSON + ) + ) + ) + .add(client().prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) + .add( + client().prepareSearch("test") // non existing doc, so error element + .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null)) + ) .get(); MultiSearchResponse.Item item = response.getResponses()[0]; @@ -937,22 +1210,24 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { public void testDisallowExpensiveQueries() throws IOException { try { - assertAcked(client().admin().indices().prepareCreate("test") + assertAcked( + client().admin() + .indices() + .prepareCreate("test") .addMapping("_doc", "id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") ); - client().prepareIndex("test", "_doc").setId("1") - .setSource(jsonBuilder().startObject() - .field("id", "1") - .field("query", matchQuery("field1", "value")).endObject()) - .get(); + client().prepareIndex("test", "_doc") + .setId("1") + .setSource(jsonBuilder().startObject().field("id", "1").field("query", matchQuery("field1", "value")).endObject()) + .get(); refresh(); // Execute with search.allow_expensive_queries = null => default value = false => success BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -962,21 +1237,21 @@ public class 
PercolatorQuerySearchIT extends OpenSearchIntegTestCase { updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - OpenSearchException e = expectThrows(OpenSearchException.class, - () -> client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get()); - assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", - e.getCause().getMessage()); + OpenSearchException e = expectThrows( + OpenSearchException.class, + () -> client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get() + ); + assertEquals( + "[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", + e.getCause().getMessage() + ); // Set search.allow_expensive_queries setting to "true" ==> success updateSettingsRequest = new ClusterUpdateSettingsRequest(); updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -989,37 +1264,56 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase { public void testWrappedWithConstantScore() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("_doc", "d", "type=date", "q", "type=percolator") - ); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", "d", "type=date", "q", "type=percolator")); - client().prepareIndex("test", "_doc").setId("1") - .setSource(jsonBuilder().startObject().field("q", - boolQuery().must(rangeQuery("d").gt("now")) - ).endObject()) - .execute().actionGet(); + client().prepareIndex("test", "_doc") + .setId("1") + .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").gt("now"))).endObject()) + .execute() + .actionGet(); - client().prepareIndex("test", "_doc").setId("2") - .setSource(jsonBuilder().startObject().field("q", - boolQuery().must(rangeQuery("d").lt("now")) - ).endObject()) - .execute().actionGet(); + client().prepareIndex("test", "_doc") + .setId("2") + .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").lt("now"))).endObject()) + .execute() + .actionGet(); client().admin().indices().prepareRefresh().get(); - SearchResponse response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", - BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), - XContentType.JSON)).get(); + SearchResponse response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON + ) + ) + .get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", - 
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), - XContentType.JSON)).addSort("_doc", SortOrder.ASC).get(); + response = client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON + ) + ) + .addSort("_doc", SortOrder.ASC) + .get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test").setQuery(constantScoreQuery(new PercolateQueryBuilder("q", - BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), - XContentType.JSON))).get(); + response = client().prepareSearch("test") + .setQuery( + constantScoreQuery( + new PercolateQueryBuilder( + "q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON + ) + ) + ) + .get(); assertEquals(1, response.getHits().getTotalHits().value); } diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java index 53202ad99f3..14e7973ec0c 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java @@ -71,9 +71,15 @@ final class PercolateQuery extends Query implements Accountable { private final IndexSearcher percolatorIndexSearcher; private final Query nonNestedDocsFilter; - PercolateQuery(String name, QueryStore queryStore, List documents, - Query candidateMatchesQuery, IndexSearcher percolatorIndexSearcher, - Query nonNestedDocsFilter, Query verifiedMatchesQuery) { + PercolateQuery( + String name, + QueryStore queryStore, + List documents, + Query candidateMatchesQuery, + IndexSearcher percolatorIndexSearcher, + Query nonNestedDocsFilter, + Query verifiedMatchesQuery + ) { this.name = name; this.documents = Objects.requireNonNull(documents); this.candidateMatchesQuery = Objects.requireNonNull(candidateMatchesQuery); @@ -87,8 +93,15 @@ final class PercolateQuery extends Query implements Accountable { public Query rewrite(IndexReader reader) throws IOException { Query rewritten = candidateMatchesQuery.rewrite(reader); if (rewritten != candidateMatchesQuery) { - return new PercolateQuery(name, queryStore, documents, rewritten, percolatorIndexSearcher, - nonNestedDocsFilter, verifiedMatchesQuery); + return new PercolateQuery( + name, + queryStore, + documents, + rewritten, + percolatorIndexSearcher, + nonNestedDocsFilter, + verifiedMatchesQuery + ); } else { return this; } @@ -100,8 +113,7 @@ final class PercolateQuery extends Query implements Accountable { final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost); return new Weight(this) { @Override - public void extractTerms(Set set) { - } + public void extractTerms(Set set) {} @Override public Explanation explain(LeafReaderContext leafReaderContext, int docId) throws IOException { @@ -143,10 +155,9 @@ final class PercolateQuery extends Query implements Accountable { Query query = percolatorQueries.apply(docId); if (query != null) { if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder() - .add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); + query = new BooleanQuery.Builder().add(query, Occur.MUST) + 
.add(nonNestedDocsFilter, Occur.FILTER) + .build(); } TopDocs topDocs = percolatorIndexSearcher.search(query, 1); if (topDocs.scoreDocs.length > 0) { @@ -189,10 +200,7 @@ final class PercolateQuery extends Query implements Accountable { return false; } if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder() - .add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); + query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); } return Lucene.exists(percolatorIndexSearcher, query); } @@ -259,8 +267,7 @@ final class PercolateQuery extends Query implements Accountable { sources.append(document.utf8ToString()); sources.append('\n'); } - return "PercolateQuery{document_sources={" + sources + "},inner={" + - candidateMatchesQuery.toString(s) + "}}"; + return "PercolateQuery{document_sources={" + sources + "},inner={" + candidateMatchesQuery.toString(s) + "}}"; } @Override diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index f68467f241e..0897f894109 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -114,10 +114,10 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { if (getResponse.isExists() == false) { throw new ResourceNotFoundException( - "indexed document [{}{}/{}] couldn't be found", indexedDocumentIndex, - indexedDocumentType == null ? "" : "/" + indexedDocumentType, indexedDocumentId + "indexed document [{}{}/{}] couldn't be found", + indexedDocumentIndex, + indexedDocumentType == null ? "" : "/" + indexedDocumentType, + indexedDocumentId ); } - if(getResponse.isSourceEmpty()) { + if (getResponse.isSourceEmpty()) { throw new IllegalArgumentException( - "indexed document [" + indexedDocumentIndex + (indexedDocumentType == null ? "" : "/" + indexedDocumentType) + - "/" + indexedDocumentId + "] source disabled" + "indexed document [" + + indexedDocumentIndex + + (indexedDocumentType == null ? 
"" : "/" + indexedDocumentType) + + "/" + + indexedDocumentId + + "] source disabled" ); } documentSupplier.set(getResponse.getSourceAsBytesRef()); @@ -526,8 +577,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder docs = new ArrayList<>(); @@ -558,8 +612,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder 1 || docs.get(0).docs().size() > 1) { assert docs.size() != 1 || docMapper.hasNestedObjects(); docSearcher = createMultiDocumentSearcher(analyzer, docs); - excludeNestedDocuments = docMapper.hasNestedObjects() && docs.stream() - .map(ParsedDocument::docs) - .mapToInt(List::size) - .anyMatch(size -> size > 1); + excludeNestedDocuments = docMapper.hasNestedObjects() + && docs.stream().map(ParsedDocument::docs).mapToInt(List::size).anyMatch(size -> size > 1); } else { MemoryIndex memoryIndex = MemoryIndex.fromDocument(docs.get(0).rootDoc(), analyzer, true, false); docSearcher = memoryIndex.createSearcher(); @@ -600,9 +653,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder iterable = () -> docs.stream() - .map(ParsedDocument::docs) - .flatMap(Collection::stream) - .iterator(); + Iterable iterable = () -> docs.stream().map(ParsedDocument::docs).flatMap(Collection::stream).iterator(); indexWriter.addDocuments(iterable); DirectoryReader directoryReader = DirectoryReader.open(indexWriter); @@ -648,8 +698,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { @@ -663,8 +712,12 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder 0) { XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(); - try (XContentParser sourceParser = xContent - .createParser(context.getXContentRegistry(), LoggingDeprecationHandler.INSTANCE, - qbSource.bytes, qbSource.offset, qbSource.length)) { - QueryBuilder queryBuilder = PercolatorFieldMapper.parseQueryBuilder(sourceParser, - sourceParser.getTokenLocation()); + try ( + XContentParser sourceParser = xContent.createParser( + context.getXContentRegistry(), + LoggingDeprecationHandler.INSTANCE, + qbSource.bytes, + qbSource.offset, + qbSource.length + ) + ) { + QueryBuilder queryBuilder = PercolatorFieldMapper.parseQueryBuilder( + sourceParser, + sourceParser.getTokenLocation() + ); queryBuilder = Rewriteable.rewrite(queryBuilder, context); return queryBuilder.toQuery(context); } @@ -739,8 +800,10 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder> IFD getForField(MappedFieldType fieldType) { - IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName(), - shardContext::lookup); + IndexFieldData.Builder builder = fieldType.fielddataBuilder( + shardContext.getFullyQualifiedIndex().getName(), + shardContext::lookup + ); IndexFieldDataCache cache = new IndexFieldDataCache.None(); CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(cache, circuitBreaker); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java index 78d6fbe4702..b379b0596f3 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java @@ -116,7 +116,10 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; static final Setting 
INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING = Setting.boolSetting( - "index.percolator.map_unmapped_fields_as_text", false, Setting.Property.IndexScope); + "index.percolator.map_unmapped_fields_as_text", + false, + Setting.Property.IndexScope + ); static final String CONTENT_TYPE = "percolator"; static final byte FIELD_VALUE_SEPARATOR = 0; // nul code point @@ -170,10 +173,19 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { fieldType.mapUnmappedFieldsAsText = getMapUnmappedFieldAsText(context.indexSettings()); context.path().remove(); - return new PercolatorFieldMapper(name(), fieldType, - multiFieldsBuilder.build(this, context), copyTo.build(), queryShardContext, extractedTermsField, - extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper, - getMapUnmappedFieldAsText(context.indexSettings())); + return new PercolatorFieldMapper( + name(), + fieldType, + multiFieldsBuilder.build(this, context), + copyTo.build(), + queryShardContext, + extractedTermsField, + extractionResultField, + queryBuilderField, + rangeFieldMapper, + minimumShouldMatchFieldMapper, + getMapUnmappedFieldAsText(context.indexSettings()) + ); } private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { @@ -199,8 +211,10 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { } static NumberFieldMapper createMinimumShouldMatchField(BuilderContext context) { - NumberFieldMapper.Builder builder = - NumberFieldMapper.Builder.docValuesOnly(MINIMUM_SHOULD_MATCH_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + NumberFieldMapper.Builder builder = NumberFieldMapper.Builder.docValuesOnly( + MINIMUM_SHOULD_MATCH_FIELD_NAME, + NumberFieldMapper.NumberType.INTEGER + ); return builder.build(context); } @@ -243,8 +257,14 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { return SourceValueFetcher.identity(name(), mapperService, format); } - Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List documents, - IndexSearcher searcher, boolean excludeNestedDocuments, Version indexVersion) throws IOException { + Query percolateQuery( + String name, + PercolateQuery.QueryStore queryStore, + List documents, + IndexSearcher searcher, + boolean excludeNestedDocuments, + Version indexVersion + ) throws IOException { IndexReader indexReader = searcher.getIndexReader(); Tuple t = createCandidateQuery(indexReader, indexVersion); Query candidateQuery = t.v1(); @@ -273,8 +293,8 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= - BooleanQuery.getMaxClauseCount(); + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + .getMaxClauseCount(); List subQueries = new ArrayList<>(); for (Map.Entry> entry : encodedPointValuesByField.entrySet()) { @@ -347,11 +367,19 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { private final RangeFieldMapper rangeFieldMapper; private final boolean mapUnmappedFieldsAsText; - PercolatorFieldMapper(String simpleName, MappedFieldType mappedFieldType, - MultiFields multiFields, CopyTo copyTo, Supplier queryShardContext, - KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField, - BinaryFieldMapper queryBuilderField, 
RangeFieldMapper rangeFieldMapper, - NumberFieldMapper minimumShouldMatchFieldMapper, boolean mapUnmappedFieldsAsText) { + PercolatorFieldMapper( + String simpleName, + MappedFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + Supplier queryShardContext, + KeywordFieldMapper queryTermsField, + KeywordFieldMapper extractionResultField, + BinaryFieldMapper queryBuilderField, + RangeFieldMapper rangeFieldMapper, + NumberFieldMapper minimumShouldMatchFieldMapper, + boolean mapUnmappedFieldsAsText + ) { super(simpleName, mappedFieldType, multiFields, copyTo); this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; @@ -375,9 +403,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { configureContext(queryShardContext, isMapUnmappedFieldAsText()); XContentParser parser = context.parser(); - QueryBuilder queryBuilder = parseQueryBuilder( - parser, parser.getTokenLocation() - ); + QueryBuilder queryBuilder = parseQueryBuilder(parser, parser.getTokenLocation()); verifyQuery(queryBuilder); // Fetching of terms, shapes and indexed scripts happen during this rewrite: PlainActionFuture future = new PlainActionFuture<>(); @@ -392,11 +418,11 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { processQuery(query, context); } - static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField, - QueryBuilder queryBuilder, ParseContext context) throws IOException { + static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField, QueryBuilder queryBuilder, ParseContext context) + throws IOException { if (indexVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) { try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) { - try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)) { + try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)) { out.setVersion(indexVersion); out.writeNamedWriteable(queryBuilder); byte[] queryBuilderAsBytes = stream.toByteArray(); @@ -491,7 +517,11 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { @Override public Iterator iterator() { return Arrays.asList( - queryTermsField, extractionResultField, queryBuilderField, minimumShouldMatchFieldMapper, rangeFieldMapper + queryTermsField, + extractionResultField, + queryBuilderField, + minimumShouldMatchFieldMapper, + rangeFieldMapper ).iterator(); } diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java index 1fa4af76432..670aa74501f 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -88,8 +88,9 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { public void process(HitContext hit) throws IOException { boolean singlePercolateQuery = percolateQueries.size() == 1; for (PercolateQuery percolateQuery : percolateQueries) { - String fieldName = singlePercolateQuery ? PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX : - PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX + "_" + percolateQuery.getName(); + String fieldName = singlePercolateQuery + ? 
PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX + : PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX + "_" + percolateQuery.getName(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore(); @@ -116,7 +117,8 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { ), percolatorLeafReaderContext, slot, - new SourceLookup()); + new SourceLookup() + ); subContext.sourceLookup().setSource(document); // force source because MemoryIndex does not store fields SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true); @@ -130,8 +132,9 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { } else { hlFieldName = percolateQuery.getName() + "_" + entry.getKey(); } - hit.hit().getHighlightFields().put(hlFieldName, - new HighlightField(hlFieldName, entry.getValue().fragments())); + hit.hit() + .getHighlightFields() + .put(hlFieldName, new HighlightField(hlFieldName, entry.getValue().fragments())); } else { // In case multiple documents are being percolated we need to identify to which document // a highlight belongs to. @@ -141,8 +144,9 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { } else { hlFieldName = percolateQuery.getName() + "_" + slot + "_" + entry.getKey(); } - hit.hit().getHighlightFields().put(hlFieldName, - new HighlightField(hlFieldName, entry.getValue().fragments())); + hit.hit() + .getHighlightFields() + .put(hlFieldName, new HighlightField(hlFieldName, entry.getValue().fragments())); } } } @@ -161,7 +165,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { @Override public void visitLeaf(Query query) { if (query instanceof PercolateQuery) { - queries.add((PercolateQuery)query); + queries.add((PercolateQuery) query); } } }); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 1063a2af15f..244e3ef1bea 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -147,8 +147,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase { Query filterNestedDocs(Query in) { if (rootDocsBySlot != null) { // Ensures that we filter out nested documents - return new BooleanQuery.Builder() - .add(in, BooleanClause.Occur.MUST) + return new BooleanQuery.Builder().add(in, BooleanClause.Occur.MUST) .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.FILTER) .build(); } @@ -157,8 +156,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase { } static IntStream convertTopDocsToSlots(TopDocs topDocs, int[] rootDocsBySlot) { - IntStream stream = Arrays.stream(topDocs.scoreDocs) - .mapToInt(scoreDoc -> scoreDoc.doc); + IntStream stream = Arrays.stream(topDocs.scoreDocs).mapToInt(scoreDoc -> scoreDoc.doc); if (rootDocsBySlot != null) { stream = stream.map(docId -> Arrays.binarySearch(rootDocsBySlot, docId)); } diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorPlugin.java index acf37a454ff..f841860d393 100644 --- 
a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorPlugin.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorPlugin.java @@ -54,10 +54,7 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlug @Override public List getFetchSubPhases(FetchPhaseConstructionContext context) { - return Arrays.asList( - new PercolatorMatchedSlotSubFetchPhase(), - new PercolatorHighlightSubFetchPhase(context.getHighlighters()) - ); + return Arrays.asList(new PercolatorMatchedSlotSubFetchPhase(), new PercolatorHighlightSubFetchPhase(context.getHighlighters())); } @Override diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java index 4dd136818f4..9cb51670caa 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java @@ -72,8 +72,7 @@ import java.util.stream.Collectors; final class QueryAnalyzer { - private QueryAnalyzer() { - } + private QueryAnalyzer() {} /** * Extracts terms and ranges from the provided query. These terms and ranges are stored with the percolator query and @@ -108,15 +107,24 @@ final class QueryAnalyzer { return builder.getResult(); } - private static final Set> verifiedQueries = new HashSet<>(Arrays.asList( - TermQuery.class, TermInSetQuery.class, SynonymQuery.class, SpanTermQuery.class, SpanOrQuery.class, - BooleanQuery.class, DisjunctionMaxQuery.class, ConstantScoreQuery.class, BoostQuery.class, - BlendedTermQuery.class - )); + private static final Set> verifiedQueries = new HashSet<>( + Arrays.asList( + TermQuery.class, + TermInSetQuery.class, + SynonymQuery.class, + SpanTermQuery.class, + SpanOrQuery.class, + BooleanQuery.class, + DisjunctionMaxQuery.class, + ConstantScoreQuery.class, + BoostQuery.class, + BlendedTermQuery.class + ) + ); private static boolean isVerified(Query query) { if (query instanceof FunctionScoreQuery) { - return ((FunctionScoreQuery)query).getMinScore() == null; + return ((FunctionScoreQuery) query).getMinScore() == null; } for (Class cls : verifiedQueries) { if (cls.isAssignableFrom(query.getClass())) { @@ -162,7 +170,8 @@ final class QueryAnalyzer { if (partialResults.size() == 1) { result = partialResults.get(0); } else { - result = conjunction ? handleConjunction(partialResults, version) + result = conjunction + ? 
handleConjunction(partialResults, version) : handleDisjunction(partialResults, minimumShouldMatch, version); } if (verified == false) { @@ -206,14 +215,11 @@ final class QueryAnalyzer { public void visitLeaf(Query query) { if (query instanceof MatchAllDocsQuery) { terms.add(new Result(true, true)); - } - else if (query instanceof MatchNoDocsQuery) { + } else if (query instanceof MatchNoDocsQuery) { terms.add(Result.MATCH_NONE); - } - else if (query instanceof PointRangeQuery) { - terms.add(pointRangeQuery((PointRangeQuery)query)); - } - else { + } else if (query instanceof PointRangeQuery) { + terms.add(pointRangeQuery((PointRangeQuery) query)); + } else { terms.add(Result.UNKNOWN); } } @@ -269,8 +275,11 @@ final class QueryAnalyzer { byte[] interval = new byte[16]; NumericUtils.subtract(16, 0, prepad(upperPoint), prepad(lowerPoint), interval); - return new Result(false, Collections.singleton(new QueryExtraction( - new Range(query.getField(), lowerPoint, upperPoint, interval))), 1); + return new Result( + false, + Collections.singleton(new QueryExtraction(new Range(query.getField(), lowerPoint, upperPoint, interval))), + 1 + ); } private static byte[] prepad(byte[] original) { @@ -313,7 +322,7 @@ final class QueryAnalyzer { // so that can lead to more false positives for percolator queries with range queries // than term based queries. // This is because the way number fields are extracted from the document to be - // percolated. Per field a single range is extracted and if a percolator query has two or + // percolated. Per field a single range is extracted and if a percolator query has two or // more range queries on the same field, then the minimum should match can be higher than clauses // in the CoveringQuery. Therefore right now the minimum should match is only incremented once per // number field when processing the percolator query at index time. 
@@ -341,11 +350,7 @@ final class QueryAnalyzer { // add range fields from this Result to the seenRangeFields set so that minimumShouldMatch is correctly // calculated for subsequent Results - result.extractions.stream() - .map(e -> e.range) - .filter(Objects::nonNull) - .map(e -> e.fieldName) - .forEach(seenRangeFields::add); + result.extractions.stream().map(e -> e.range).filter(Objects::nonNull).map(e -> e.fieldName).forEach(seenRangeFields::add); if (result.verified == false // If some inner extractions are optional, the result can't be verified @@ -363,7 +368,6 @@ final class QueryAnalyzer { return new Result(verified, extractions, msm); } - } else { Result bestClause = null; for (Result result : conjunctions) { @@ -408,12 +412,12 @@ final class QueryAnalyzer { for (int i = 0; i < disjunctions.size(); i++) { Result subResult = disjunctions.get(i); if (subResult.verified == false - // one of the sub queries requires more than one term to match, we can't - // verify it with a single top-level min_should_match - || subResult.minimumShouldMatch > 1 - // One of the inner clauses has multiple extractions, we won't be able to - // verify it with a single top-level min_should_match - || (subResult.extractions.size() > 1 && requiredShouldClauses > 1)) { + // one of the sub queries requires more than one term to match, we can't + // verify it with a single top-level min_should_match + || subResult.minimumShouldMatch > 1 + // One of the inner clauses has multiple extractions, we won't be able to + // verify it with a single top-level min_should_match + || (subResult.extractions.size() > 1 && requiredShouldClauses > 1)) { verified = false; } if (subResult.matchAllDocs) { @@ -435,15 +439,12 @@ final class QueryAnalyzer { int msm = 0; if (version.onOrAfter(LegacyESVersion.V_6_1_0) && - // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot, - // so for now lets not do it. + // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot, + // so for now lets not do it. hasRangeExtractions == false) { // Figure out what the combined msm is for this disjunction: // (sum the lowest required clauses, otherwise we're too strict and queries may not match) - clauses = clauses.stream() - .filter(val -> val > 0) - .sorted() - .collect(Collectors.toList()); + clauses = clauses.stream().filter(val -> val > 0).sorted().collect(Collectors.toList()); // When there are duplicated query extractions, percolator can no longer reliably determine msm across this disjunction if (hasDuplicateTerms) { @@ -536,8 +537,8 @@ final class QueryAnalyzer { private static int minTermLength(Set extractions) { // In case there are only range extractions, then we return Integer.MIN_VALUE, // so that selectBestExtraction(...) 
we are likely to prefer the extractions that contains at least a single extraction - if (extractions.stream().filter(queryExtraction -> queryExtraction.term != null).count() == 0 && - extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) { + if (extractions.stream().filter(queryExtraction -> queryExtraction.term != null).count() == 0 + && extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) { return Integer.MIN_VALUE; } @@ -578,8 +579,9 @@ final class QueryAnalyzer { Result(boolean matchAllDocs, boolean verified, Set extractions, int minimumShouldMatch) { if (minimumShouldMatch > extractions.size()) { - throw new IllegalArgumentException("minimumShouldMatch can't be greater than the number of extractions: " - + minimumShouldMatch + " > " + extractions.size()); + throw new IllegalArgumentException( + "minimumShouldMatch can't be greater than the number of extractions: " + minimumShouldMatch + " > " + extractions.size() + ); } this.matchAllDocs = matchAllDocs; this.extractions = extractions; @@ -616,7 +618,7 @@ final class QueryAnalyzer { return matchAllDocs == false && extractions.isEmpty(); } - static final Result UNKNOWN = new Result(false, false, Collections.emptySet(), 0){ + static final Result UNKNOWN = new Result(false, false, Collections.emptySet(), 0) { @Override boolean isUnknown() { return true; @@ -673,8 +675,7 @@ final class QueryAnalyzer { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; QueryExtraction queryExtraction = (QueryExtraction) o; - return Objects.equals(term, queryExtraction.term) && - Objects.equals(range, queryExtraction.range); + return Objects.equals(term, queryExtraction.term) && Objects.equals(range, queryExtraction.range); } @Override @@ -684,10 +685,7 @@ final class QueryAnalyzer { @Override public String toString() { - return "QueryExtraction{" + - "term=" + term + - ",range=" + range + - '}'; + return "QueryExtraction{" + "term=" + term + ",range=" + range + '}'; } } @@ -711,9 +709,9 @@ final class QueryAnalyzer { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Range range = (Range) o; - return Objects.equals(fieldName, range.fieldName) && - Arrays.equals(lowerPoint, range.lowerPoint) && - Arrays.equals(upperPoint, range.upperPoint); + return Objects.equals(fieldName, range.fieldName) + && Arrays.equals(lowerPoint, range.lowerPoint) + && Arrays.equals(upperPoint, range.upperPoint); } @Override @@ -727,10 +725,7 @@ final class QueryAnalyzer { @Override public String toString() { - return "Range{" + - ", fieldName='" + fieldName + '\'' + - ", interval=" + interval + - '}'; + return "Range{" + ", fieldName='" + fieldName + '\'' + ", interval=" + interval + '}'; } } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 064df79ed54..5f44a9a507b 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -163,22 +163,51 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { indexService = createIndex(indexName, Settings.EMPTY); mapperService = indexService.mapperService(); - String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + String mapper = Strings.toString( + 
XContentFactory.jsonBuilder() + .startObject() + .startObject("type") .startObject("properties") - .startObject("int_field").field("type", "integer").endObject() - .startObject("long_field").field("type", "long").endObject() - .startObject("half_float_field").field("type", "half_float").endObject() - .startObject("float_field").field("type", "float").endObject() - .startObject("double_field").field("type", "double").endObject() - .startObject("ip_field").field("type", "ip").endObject() - .startObject("field").field("type", "keyword").endObject() - .endObject().endObject().endObject()); + .startObject("int_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + .startObject("half_float_field") + .field("type", "half_float") + .endObject() + .startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("ip_field") + .field("type", "ip") + .endObject() + .startObject("field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + ); documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); String queryField = "query_field"; - String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject() - .endObject().endObject()); + String percolatorMapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject(queryField) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + ); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType(); @@ -226,12 +255,32 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { queryFunctions.add(() -> new TermQuery(new Term(field2, randomFrom(stringContent.get(field2))))); queryFunctions.add(() -> intFieldType.termQuery(randomFrom(intValues), context)); queryFunctions.add(() -> intFieldType.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context)); - queryFunctions.add(() -> intFieldType.rangeQuery(intValues.get(4), intValues.get(intValues.size() - 4), true, - true, ShapeRelation.WITHIN, null, null, context)); - queryFunctions.add(() -> new TermInSetQuery(field1, new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))))); - queryFunctions.add(() -> new TermInSetQuery(field2, new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))))); + queryFunctions.add( + () -> intFieldType.rangeQuery( + intValues.get(4), + intValues.get(intValues.size() - 4), + true, + true, + ShapeRelation.WITHIN, + null, + null, + context + ) + ); + queryFunctions.add( + () -> new TermInSetQuery( + field1, + new BytesRef(randomFrom(stringContent.get(field1))), + new BytesRef(randomFrom(stringContent.get(field1))) + ) + ); + queryFunctions.add( + () -> new TermInSetQuery( + field2, + new BytesRef(randomFrom(stringContent.get(field1))), + new BytesRef(randomFrom(stringContent.get(field1))) + ) + ); // many 
iterations with boolean queries, which are the most complex queries to deal with when nested int numRandomBoolQueries = 1000; for (int i = 0; i < numRandomBoolQueries; i++) { @@ -276,8 +325,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { document.add(new TextField(entry.getKey(), value, Field.Store.NO)); } for (Integer intValue : intValues) { - List numberFields = - NumberFieldMapper.NumberType.INTEGER.createFields("int_field", intValue, true, true, false); + List numberFields = NumberFieldMapper.NumberType.INTEGER.createFields("int_field", intValue, true, true, false); for (Field numberField : numberFields) { document.add(numberField); } @@ -286,8 +334,13 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { duelRun(queryStore, memoryIndex, shardSearcher); } - private BooleanQuery createRandomBooleanQuery(int depth, List fields, Map> content, - MappedFieldType intFieldType, List intValues) { + private BooleanQuery createRandomBooleanQuery( + int depth, + List fields, + Map> content, + MappedFieldType intFieldType, + List intValues + ) { BooleanQuery.Builder builder = new BooleanQuery.Builder(); int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4)); // use low numbers of clauses more often int numShouldClauses = 0; @@ -348,9 +401,9 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { MappedFieldType intFieldType = mapperService.fieldType("int_field"); List ranges = new ArrayList<>(); - ranges.add(new int[]{-5, 5}); - ranges.add(new int[]{0, 10}); - ranges.add(new int[]{15, 50}); + ranges.add(new int[] { -5, 5 }); + ranges.add(new int[] { 0, 10 }); + ranges.add(new int[] { 15, 50 }); QueryShardContext context = createSearchContext(indexService).getQueryShardContext(); List documents = new ArrayList<>(); @@ -395,8 +448,13 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { } for (int[] range : ranges) { - List numberFields = - NumberFieldMapper.NumberType.INTEGER.createFields("int_field", between(range[0], range[1]), true, true, false); + List numberFields = NumberFieldMapper.NumberType.INTEGER.createFields( + "int_field", + between(range[0], range[1]), + true, + true, + false + ); for (Field numberField : numberFields) { document.add(numberField); } @@ -406,8 +464,13 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { } } - private BooleanQuery randomBQ(int depth, List stringValues, List ranges, - MappedFieldType intFieldType, QueryShardContext context) { + private BooleanQuery randomBQ( + int depth, + List stringValues, + List ranges, + MappedFieldType intFieldType, + QueryShardContext context + ) { final int numClauses = randomIntBetween(1, 4); final boolean onlyShouldClauses = randomBoolean(); final BooleanQuery.Builder builder = new BooleanQuery.Builder(); @@ -555,26 +618,23 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { commonTermsQuery.add(new Term("field", "fox")); addQuery(commonTermsQuery, documents); - BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(new Term[]{new Term("field", "quick"), - new Term("field", "brown"), new Term("field", "fox")}, 1.0f); + BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery( + new Term[] { new Term("field", "quick"), new Term("field", "brown"), new Term("field", "fox") }, + 1.0f + ); addQuery(blendedTermQuery, documents); - SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true) - .addClause(new SpanTermQuery(new Term("field", "quick"))) 
- .addClause(new SpanTermQuery(new Term("field", "brown"))) - .addClause(new SpanTermQuery(new Term("field", "fox"))) - .build(); + SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true).addClause(new SpanTermQuery(new Term("field", "quick"))) + .addClause(new SpanTermQuery(new Term("field", "brown"))) + .addClause(new SpanTermQuery(new Term("field", "fox"))) + .build(); addQuery(spanNearQuery, documents); - SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true) - .addClause(new SpanTermQuery(new Term("field", "the"))) - .addClause(new SpanTermQuery(new Term("field", "lazy"))) - .addClause(new SpanTermQuery(new Term("field", "doc"))) - .build(); - SpanOrQuery spanOrQuery = new SpanOrQuery( - spanNearQuery, - spanNearQuery2 - ); + SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true).addClause(new SpanTermQuery(new Term("field", "the"))) + .addClause(new SpanTermQuery(new Term("field", "lazy"))) + .addClause(new SpanTermQuery(new Term("field", "doc"))) + .build(); + SpanOrQuery spanOrQuery = new SpanOrQuery(spanNearQuery, spanNearQuery2); addQuery(spanOrQuery, documents); SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery); @@ -616,8 +676,14 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { Version v = LegacyESVersion.V_6_1_0; MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), - percolateSearcher, false, v); + Query query = fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + v + ); TopDocs topDocs = shardSearcher.search(query, 1); assertEquals(1L, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); @@ -631,8 +697,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); - memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)), - new WhitespaceAnalyzer()); + memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); @@ -656,8 +721,10 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); - memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new InetAddressPoint("ip_field", - forString("192.168.0.4"))), new WhitespaceAnalyzer()); + memoryIndex = MemoryIndex.fromDocument( + Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.0.4"))), + new WhitespaceAnalyzer() + ); percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); @@ -691,8 +758,10 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { int lowerIpPart = randomIntBetween(0, 255); int upperIpPart = randomIntBetween(lowerIpPart, 
255); - addQuery(InetAddressPoint.newRangeQuery("ip_field", forString("192.168.1." + lowerIpPart), - forString("192.168.1." + upperIpPart)), documents); + addQuery( + InetAddressPoint.newRangeQuery("ip_field", forString("192.168.1." + lowerIpPart), forString("192.168.1." + upperIpPart)), + documents + ); indexWriter.addDocuments(documents); indexWriter.close(); @@ -756,15 +825,13 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer()); duelRun(queryStore, memoryIndex, shardSearcher); - doc = Collections.singleton(new InetAddressPoint("ip_field", - forString("192.168.1." + randomIntBetween(lowerIpPart, upperIpPart)))); + doc = Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.1." + randomIntBetween(lowerIpPart, upperIpPart)))); memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer()); result = executeQuery(queryStore, memoryIndex, shardSearcher); assertThat(result.scoreDocs.length, equalTo(1)); assertThat(result.scoreDocs[0].doc, equalTo(5)); duelRun(queryStore, memoryIndex, shardSearcher); - doc = Collections.singleton(new InetAddressPoint("ip_field", - forString("192.168.1." + randomIntBetween(0, 255)))); + doc = Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.1." + randomIntBetween(0, 255)))); memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer()); duelRun(queryStore, memoryIndex, shardSearcher); } @@ -798,8 +865,14 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + Version.CURRENT + ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); assertEquals(3L, topDocs.totalHits.value); assertEquals(3, topDocs.scoreDocs.length); @@ -831,8 +904,14 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + Version.CURRENT + ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); @@ -879,11 +958,16 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { documents.add(document); iw.addDocuments(documents); // IW#addDocuments(...) 
ensures we end up with a single segment } - try (IndexReader ir = DirectoryReader.open(directory)){ + try (IndexReader ir = DirectoryReader.open(directory)) { IndexSearcher percolateSearcher = new IndexSearcher(ir); - PercolateQuery query = (PercolateQuery) - fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), - percolateSearcher, false, v); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + v + ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); @@ -910,11 +994,16 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { } iw.addDocument(document); } - try (IndexReader ir = DirectoryReader.open(directory)){ + try (IndexReader ir = DirectoryReader.open(directory)) { IndexSearcher percolateSearcher = new IndexSearcher(ir); - PercolateQuery query = (PercolateQuery) - fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), - percolateSearcher, false, v); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + v + ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); @@ -947,8 +1036,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { builder.add(builder2.build(), Occur.MUST); addQuery(builder.build(), docs); - builder = new BooleanQuery.Builder() - .setMinimumNumberShouldMatch(2); + builder = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2); builder1 = new BooleanQuery.Builder(); builder1.add(new TermQuery(new Term("field", "value1")), Occur.MUST); builder1.add(new TermQuery(new Term("field", "value2")), Occur.MUST); @@ -1069,8 +1157,14 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { boolean requireScore = randomBoolean(); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + Query percolateQuery = fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + Version.CURRENT + ); Query query = requireScore ? 
percolateQuery : new ConstantScoreQuery(percolateQuery); TopDocs topDocs = shardSearcher.search(query, 100); @@ -1120,8 +1214,10 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { } logger.error("controlTopDocs.scoreDocs[{}].query_terms_field={}", i, builder.toString()); - NumericDocValues numericValues = - MultiDocValues.getNumericValues(shardSearcher.getIndexReader(), fieldType.minimumShouldMatchField.name()); + NumericDocValues numericValues = MultiDocValues.getNumericValues( + shardSearcher.getIndexReader(), + fieldType.minimumShouldMatchField.name() + ); boolean exact = numericValues.advanceExact(controlTopDocs.scoreDocs[i].doc); if (exact) { logger.error("controlTopDocs.scoreDocs[{}].minimum_should_match_field={}", i, numericValues.longValue()); @@ -1137,10 +1233,17 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { private void addQuery(Query query, List docs) { IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .numberOfShards(1) + .numberOfReplicas(0) + .build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); - ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext( + settings, + mapperService.documentMapperParser(), + documentMapper, + null, + null + ); fieldMapper.processQuery(query, parseContext); ParseContext.Document queryDocument = parseContext.doc(); // Add to string representation of the query to make debugging easier: @@ -1149,12 +1252,17 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { queries.add(query); } - private TopDocs executeQuery(PercolateQuery.QueryStore queryStore, - MemoryIndex memoryIndex, - IndexSearcher shardSearcher) throws IOException { + private TopDocs executeQuery(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) + throws IOException { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + Query percolateQuery = fieldType.percolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + percolateSearcher, + false, + Version.CURRENT + ); return shardSearcher.search(percolateQuery, 10); } @@ -1224,7 +1332,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { @Override public Scorer scorer(LeafReaderContext context) throws IOException { - float _score[] = new float[]{boost}; + float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) { diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 3a184ca7611..3b0830b7e45 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -80,8 +80,7 
@@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase pqb.toQuery(createShardContext())); assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); QueryBuilder rewrite = rewriteAndFetch(pqb, createShardContext()); - PercolateQueryBuilder geoShapeQueryBuilder = - new PercolateQueryBuilder(pqb.getField(), pqb.getDocumentType(), documentSource, XContentType.JSON); + PercolateQueryBuilder geoShapeQueryBuilder = new PercolateQueryBuilder( + pqb.getField(), + pqb.getDocumentType(), + documentSource, + XContentType.JSON + ); assertEquals(geoShapeQueryBuilder, rewrite); } public void testIndexedDocumentDoesNotExist() throws IOException { indexedDocumentExists = false; PercolateQueryBuilder pqb = doCreateTestQueryBuilder(true); - ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> rewriteAndFetch(pqb, - createShardContext())); - String expectedString = "indexed document [" + indexedDocumentIndex + "/" + - indexedDocumentId + "] couldn't be found"; - assertThat(e.getMessage() , equalTo(expectedString)); + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> rewriteAndFetch(pqb, createShardContext())); + String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentId + "] couldn't be found"; + assertThat(e.getMessage(), equalTo(expectedString)); } @Override protected Set getObjectsHoldingArbitraryContent() { - //document contains arbitrary content, no error expected when an object is added to it - return new HashSet<>(Arrays.asList(PercolateQueryBuilder.DOCUMENT_FIELD.getPreferredName(), - PercolateQueryBuilder.DOCUMENTS_FIELD.getPreferredName())); + // document contains arbitrary content, no error expected when an object is added to it + return new HashSet<>( + Arrays.asList(PercolateQueryBuilder.DOCUMENT_FIELD.getPreferredName(), PercolateQueryBuilder.DOCUMENTS_FIELD.getPreferredName()) + ); } public void testRequiredParameters() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - new PercolateQueryBuilder(null, new BytesArray("{}"), XContentType.JSON); - }); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> { new PercolateQueryBuilder(null, new BytesArray("{}"), XContentType.JSON); } + ); assertThat(e.getMessage(), equalTo("[field] is a required argument")); - e = expectThrows(IllegalArgumentException.class, - () -> new PercolateQueryBuilder("_field", "_document_type", null, null)); + e = expectThrows(IllegalArgumentException.class, () -> new PercolateQueryBuilder("_field", "_document_type", null, null)); assertThat(e.getMessage(), equalTo("[document] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> { - new PercolateQueryBuilder(null, null, "_index", "_type", "_id", null, null, null); - }); + e = expectThrows( + IllegalArgumentException.class, + () -> { new PercolateQueryBuilder(null, null, "_index", "_type", "_id", null, null, null); } + ); assertThat(e.getMessage(), equalTo("[field] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> { - new PercolateQueryBuilder("_field", "_document_type", null, "_type", "_id", null, null, null); - }); + e = expectThrows( + IllegalArgumentException.class, + () -> { new PercolateQueryBuilder("_field", "_document_type", null, "_type", "_id", null, null, null); } + ); assertThat(e.getMessage(), equalTo("[index] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> { - 
new PercolateQueryBuilder("_field", "_document_type", "_index", "_type", null, null, null, null); - }); + e = expectThrows( + IllegalArgumentException.class, + () -> { new PercolateQueryBuilder("_field", "_document_type", "_index", "_type", null, null, null, null); } + ); assertThat(e.getMessage(), equalTo("[id] is a required argument")); } @@ -251,8 +298,9 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase())); QueryShardContext queryShardContext = createShardContext(); - QueryBuilder queryBuilder = parseQuery("{\"percolate\" : { \"index\": \"" + indexedDocumentIndex + "\", \"id\": \"" + - indexedDocumentId + "\", \"field\":\"" + queryField + "\"}}"); + QueryBuilder queryBuilder = parseQuery( + "{\"percolate\" : { \"index\": \"" + + indexedDocumentIndex + + "\", \"id\": \"" + + indexedDocumentId + + "\", \"field\":\"" + + queryField + + "\"}}" + ); rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext); } @@ -276,15 +331,24 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase())); QueryShardContext queryShardContext = createShardContext(); - QueryBuilder queryBuilder = parseQuery("{\"percolate\" : { \"index\": \"" + indexedDocumentIndex + - "\", \"type\": \"_doc\", \"id\": \"" + indexedDocumentId + "\", \"field\":\"" + queryField + "\"}}"); + QueryBuilder queryBuilder = parseQuery( + "{\"percolate\" : { \"index\": \"" + + indexedDocumentIndex + + "\", \"type\": \"_doc\", \"id\": \"" + + indexedDocumentId + + "\", \"field\":\"" + + queryField + + "\"}}" + ); rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext); assertWarnings(PercolateQueryBuilder.TYPE_DEPRECATION_MESSAGE); } public void testBothDocumentAndDocumentsSpecified() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parseQuery("{\"percolate\" : { \"document\": {}, \"documents\": [{}, {}], \"field\":\"" + queryField + "\"}}")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> parseQuery("{\"percolate\" : { \"document\": {}, \"documents\": [{}, {}], \"field\":\"" + queryField + "\"}}") + ); assertThat(e.getMessage(), containsString("The following fields are not allowed together: [document, documents]")); } @@ -348,9 +412,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(queryShardContext)); - assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", - e.getMessage()); + OpenSearchException e = expectThrows(OpenSearchException.class, () -> queryBuilder.toQuery(queryShardContext)); + assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getMessage()); } } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java index 55883978b79..a4a6f9b6de2 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java @@ -129,8 +129,17 @@ public class PercolateQueryTests extends OpenSearchTestCase { memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); // no scoring, wrapping it in a constant score query: - Query query = new ConstantScoreQuery(new PercolateQuery("_name", 
queryStore, Collections.singletonList(new BytesArray("a")), - new TermQuery(new Term("select", "a")), percolateSearcher, null, new MatchNoDocsQuery(""))); + Query query = new ConstantScoreQuery( + new PercolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("a")), + new TermQuery(new Term("select", "a")), + percolateSearcher, + null, + new MatchNoDocsQuery("") + ) + ); TopDocs topDocs = shardSearcher.search(query, 10); assertThat(topDocs.totalHits.value, equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); @@ -139,8 +148,17 @@ public class PercolateQueryTests extends OpenSearchTestCase { assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); - query = new ConstantScoreQuery(new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("b")), - new TermQuery(new Term("select", "b")), percolateSearcher, null, new MatchNoDocsQuery(""))); + query = new ConstantScoreQuery( + new PercolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("b")), + new TermQuery(new Term("select", "b")), + percolateSearcher, + null, + new MatchNoDocsQuery("") + ) + ); topDocs = shardSearcher.search(query, 10); assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); @@ -159,13 +177,29 @@ public class PercolateQueryTests extends OpenSearchTestCase { assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score)); - query = new ConstantScoreQuery(new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("c")), - new MatchAllDocsQuery(), percolateSearcher, null, new MatchAllDocsQuery())); + query = new ConstantScoreQuery( + new PercolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("c")), + new MatchAllDocsQuery(), + percolateSearcher, + null, + new MatchAllDocsQuery() + ) + ); topDocs = shardSearcher.search(query, 10); assertThat(topDocs.totalHits.value, equalTo(4L)); - query = new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), - new TermQuery(new Term("select", "b")), percolateSearcher, null, new MatchNoDocsQuery("")); + query = new PercolateQuery( + "_name", + queryStore, + Collections.singletonList(new BytesArray("{}")), + new TermQuery(new Term("select", "b")), + percolateSearcher, + null, + new MatchNoDocsQuery("") + ); topDocs = shardSearcher.search(query, 10); assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java index c22457d3d54..a25ab9a2bb7 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -48,21 +48,26 @@ public class PercolateWithNestedQueryBuilderTests extends PercolateQueryBuilderT @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { super.initializeAdditionalMappings(mapperService); - mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef( - "_doc", "some_nested_object", "type=nested"))), MapperService.MergeReason.MAPPING_UPDATE); + 
mapperService.merge( + "_doc", + new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", "some_nested_object", "type=nested"))), + MapperService.MergeReason.MAPPING_UPDATE + ); } public void testDetectsNestedDocuments() throws IOException { QueryShardContext shardContext = createShardContext(); - PercolateQueryBuilder builder = new PercolateQueryBuilder(queryField, - new BytesArray("{ \"foo\": \"bar\" }"), XContentType.JSON); + PercolateQueryBuilder builder = new PercolateQueryBuilder(queryField, new BytesArray("{ \"foo\": \"bar\" }"), XContentType.JSON); QueryBuilder rewrittenBuilder = rewriteAndFetch(builder, shardContext); PercolateQuery query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext); assertFalse(query.excludesNestedDocs()); - builder = new PercolateQueryBuilder(queryField, - new BytesArray("{ \"foo\": \"bar\", \"some_nested_object\": [ { \"baz\": 42 } ] }"), XContentType.JSON); + builder = new PercolateQueryBuilder( + queryField, + new BytesArray("{ \"foo\": \"bar\", \"some_nested_object\": [ { \"baz\": 42 } ] }"), + XContentType.JSON + ); rewrittenBuilder = rewriteAndFetch(builder, shardContext); query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext); assertTrue(query.excludesNestedDocs()); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index b6c7516a3e9..89e651d9456 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -159,30 +159,71 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { indexService = createIndex("test"); mapperService = indexService.mapperService(); - String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") - .startObject("properties") - .startObject("field").field("type", "text").endObject() - .startObject("field1").field("type", "text").endObject() - .startObject("field2").field("type", "text").endObject() - .startObject("_field3").field("type", "text").endObject() - .startObject("field4").field("type", "text").endObject() - .startObject("number_field1").field("type", "integer").endObject() - .startObject("number_field2").field("type", "long").endObject() - .startObject("number_field3").field("type", "long").endObject() - .startObject("number_field4").field("type", "half_float").endObject() - .startObject("number_field5").field("type", "float").endObject() - .startObject("number_field6").field("type", "double").endObject() - .startObject("number_field7").field("type", "ip").endObject() - .startObject("date_field").field("type", "date").endObject() - .endObject().endObject().endObject()); + String mapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject("field") + .field("type", "text") + .endObject() + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("field2") + .field("type", "text") + .endObject() + .startObject("_field3") + .field("type", "text") + .endObject() + .startObject("field4") + .field("type", "text") + .endObject() + .startObject("number_field1") + .field("type", "integer") + .endObject() + .startObject("number_field2") + .field("type", "long") + .endObject() + .startObject("number_field3") + .field("type", "long") + 
.endObject() + .startObject("number_field4") + .field("type", "half_float") + .endObject() + .startObject("number_field5") + .field("type", "float") + .endObject() + .startObject("number_field6") + .field("type", "double") + .endObject() + .startObject("number_field7") + .field("type", "ip") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .endObject() + ); mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { fieldName = randomAlphaOfLength(4); - String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") - .startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject() - .endObject().endObject()); + String percolatorMapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject(fieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + ); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -199,10 +240,17 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .numberOfShards(1) + .numberOfReplicas(0) + .build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); - ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext( + settings, + mapperService.documentMapperParser(), + documentMapper, + null, + null + ); fieldMapper.processQuery(bq.build(), parseContext); ParseContext.Document document = parseContext.doc(); @@ -223,8 +271,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { bq.add(termQuery1, Occur.MUST); bq.add(termQuery2, Occur.MUST); - parseContext = new ParseContext.InternalParseContext(settings, mapperService.documentMapperParser(), - documentMapper, null, null); + parseContext = new ParseContext.InternalParseContext(settings, mapperService.documentMapperParser(), documentMapper, null, null); fieldMapper.processQuery(bq.build(), parseContext); document = parseContext.doc(); @@ -244,21 +291,26 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { QueryShardContext context = createSearchContext(indexService).getQueryShardContext(); addQueryFieldMappings(); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - Query rangeQuery1 = mapperService.fieldType("number_field1") - .rangeQuery(10, 20, true, true, null, null, null, context); + Query rangeQuery1 = mapperService.fieldType("number_field1").rangeQuery(10, 20, true, true, null, null, null, context); bq.add(rangeQuery1, Occur.MUST); - Query rangeQuery2 = mapperService.fieldType("number_field1") - .rangeQuery(15, 20, true, true, null, null, null, context); + Query rangeQuery2 = 
mapperService.fieldType("number_field1").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); DocumentMapper documentMapper = mapperService.documentMapper("doc"); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .numberOfShards(1) + .numberOfReplicas(0) + .build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); - ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext( + settings, + mapperService.documentMapperParser(), + documentMapper, + null, + null + ); fieldMapper.processQuery(bq.build(), parseContext); ParseContext.Document document = parseContext.doc(); @@ -279,12 +331,10 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { // Range queries on different fields: bq = new BooleanQuery.Builder(); bq.add(rangeQuery1, Occur.MUST); - rangeQuery2 = mapperService.fieldType("number_field2") - .rangeQuery(15, 20, true, true, null, null, null, context); + rangeQuery2 = mapperService.fieldType("number_field2").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); - parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + parseContext = new ParseContext.InternalParseContext(settings, mapperService.documentMapperParser(), documentMapper, null, null); fieldMapper.processQuery(bq.build(), parseContext); document = parseContext.doc(); @@ -309,10 +359,17 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .numberOfShards(1) + .numberOfReplicas(0) + .build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); - ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext( + settings, + mapperService.documentMapperParser(), + documentMapper, + null, + null + ); fieldMapper.processQuery(query, parseContext); ParseContext.Document document = parseContext.doc(); @@ -328,10 +385,17 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .numberOfShards(1) + .numberOfReplicas(0) + .build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); - ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings, - mapperService.documentMapperParser(), documentMapper, null, null); + 
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext( + settings, + mapperService.documentMapperParser(), + documentMapper, + null, + null + ); fieldMapper.processQuery(phraseQuery, parseContext); ParseContext.Document document = parseContext.doc(); @@ -382,7 +446,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { assertEquals("field4\u0000123", terms.get(13).utf8ToString()); } - public void testCreateCandidateQuery() throws Exception { addQueryFieldMappings(); @@ -493,67 +556,84 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { public void testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), - equalTo(EXTRACTION_COMPLETE)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), equalTo(EXTRACTION_COMPLETE)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject()), - XContentType.JSON)); + doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), - equalTo(EXTRACTION_FAILED)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), equalTo(EXTRACTION_FAILED)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(0)); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1)); qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); queryBuilder = rangeQuery("date_field").from("now"); - doc = mapperService.documentMapper().parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, 
queryBuilder) - .endObject()), - XContentType.JSON)); + doc = mapperService.documentMapper() + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), - equalTo(EXTRACTION_FAILED)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), equalTo(EXTRACTION_FAILED)); } public void testStoringQueries() throws Exception { addQueryFieldMappings(); - QueryBuilder[] queries = new QueryBuilder[]{ - termQuery("field", "value"), matchAllQuery(), matchQuery("field", "value"), matchPhraseQuery("field", "value"), - prefixQuery("field", "v"), wildcardQuery("field", "v*"), rangeQuery("number_field2").gte(0).lte(9), - rangeQuery("date_field").from("2015-01-01T00:00").to("2015-01-01T00:00") - }; + QueryBuilder[] queries = new QueryBuilder[] { + termQuery("field", "value"), + matchAllQuery(), + matchQuery("field", "value"), + matchPhraseQuery("field", "value"), + prefixQuery("field", "v"), + wildcardQuery("field", "v*"), + rangeQuery("number_field2").gte(0).lte(9), + rangeQuery("date_field").from("2015-01-01T00:00").to("2015-01-01T00:00") }; // note: it important that range queries never rewrite, otherwise it will cause results to be wrong. // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field(fieldName, query) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()), + XContentType.JSON + ) + ); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, query); } @@ -563,56 +643,73 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { addQueryFieldMappings(); client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), + XContentType.JSON + ) + ); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); QueryShardContext shardContext = indexService.newQueryShardContext( - randomInt(20), null, () -> { - throw new UnsupportedOperationException(); - }, null); + randomInt(20), + null, + () -> { throw new UnsupportedOperationException(); }, + null + ); PlainActionFuture future = new PlainActionFuture<>(); Rewriteable.rewriteAndFetch(queryBuilder, shardContext, future); 
assertQueryBuilder(qbSource, future.get()); } - public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, termQuery("unmapped_field", "value")) - .endObject()), - XContentType.JSON)); + mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes( + XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject() + ), + XContentType.JSON + ) + ); }); assertThat(exception.getCause(), instanceOf(QueryShardException.class)); assertThat(exception.getCause().getMessage(), equalTo("No field mapping can be found for the field with name [unmapped_field]")); } - public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference - .bytes(XContentFactory - .jsonBuilder() - .startObject() - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory - .jsonBuilder() - .startObject() - .nullField(fieldName) - .endObject()), - XContentType.JSON)); + mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()), + XContentType.JSON + ) + ); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); } @@ -623,32 +720,59 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") - .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject() - .endObject().endObject()); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)); + String percolatorMapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject(fieldName) + .field("type", "percolator") + .field("index", "no") + .endObject() + .endObject() + .endObject() + .endObject() + ); + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } // multiple percolator fields are allowed in the mapping, but only one 
field can be used at index time. public void testMultiplePercolatorFields() throws Exception { String typeName = "doc"; - String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName) + String percolatorMapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject(typeName) .startObject("properties") - .startObject("query_field1").field("type", "percolator").endObject() - .startObject("query_field2").field("type", "percolator").endObject() + .startObject("query_field1") + .field("type", "percolator") .endObject() - .endObject().endObject()); + .startObject("query_field2") + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + ); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", - BytesReference.bytes(jsonBuilder().startObject() - .field("query_field1", queryBuilder) - .field("query_field2", queryBuilder) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper(typeName) + .parse( + new SourceToParse( + "test", + typeName, + "1", + BytesReference.bytes( + jsonBuilder().startObject().field("query_field1", queryBuilder).field("query_field2", queryBuilder).endObject() + ), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields().size(), equalTo(16)); // also includes all other meta fields BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); @@ -660,24 +784,38 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { // percolator field can be nested under an object field, but only one query can be specified per document public void testNestedPercolatorField() throws Exception { String typeName = "doc"; - String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName) + String percolatorMapper = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject(typeName) .startObject("properties") .startObject("object_field") - .field("type", "object") - .startObject("properties") - .startObject("query_field").field("type", "percolator").endObject() - .endObject() + .field("type", "object") + .startObject("properties") + .startObject("query_field") + .field("type", "percolator") .endObject() .endObject() - .endObject().endObject()); + .endObject() + .endObject() + .endObject() + .endObject() + ); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", - BytesReference.bytes(jsonBuilder().startObject().startObject("object_field") - .field("query_field", queryBuilder) - .endObject().endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper(typeName) + .parse( + new SourceToParse( + "test", + typeName, + "1", + BytesReference.bytes( + jsonBuilder().startObject().startObject("object_field").field("query_field", queryBuilder).endObject().endObject() + ), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields().size(), equalTo(12)); // also includes all other meta 
fields IndexableField queryBuilderField = doc.rootDoc().getField("object_field.query_field.query_builder_field"); assertTrue(queryBuilderField.fieldType().omitNorms()); @@ -686,28 +824,51 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { BytesRef queryBuilderAsBytes = queryBuilderField.binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); - doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", - BytesReference.bytes(jsonBuilder().startObject() + doc = mapperService.documentMapper(typeName) + .parse( + new SourceToParse( + "test", + typeName, + "1", + BytesReference.bytes( + jsonBuilder().startObject() .startArray("object_field") - .startObject().field("query_field", queryBuilder).endObject() + .startObject() + .field("query_field", queryBuilder) + .endObject() .endArray() - .endObject()), - XContentType.JSON)); + .endObject() + ), + XContentType.JSON + ) + ); assertThat(doc.rootDoc().getFields().size(), equalTo(12)); // also includes all other meta fields queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", - BytesReference.bytes(jsonBuilder().startObject() - .startArray("object_field") - .startObject().field("query_field", queryBuilder).endObject() - .startObject().field("query_field", queryBuilder).endObject() - .endArray() - .endObject()), - XContentType.JSON)); - } - ); + mapperService.documentMapper(typeName) + .parse( + new SourceToParse( + "test", + typeName, + "1", + BytesReference.bytes( + jsonBuilder().startObject() + .startArray("object_field") + .startObject() + .field("query_field", queryBuilder) + .endObject() + .startObject() + .field("query_field", queryBuilder) + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ); + }); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); } @@ -719,19 +880,20 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { PercolatorFieldMapper.verifyQuery(rangeQuery2); HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); - expectThrows(IllegalArgumentException.class, () -> - PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery))); - expectThrows(IllegalArgumentException.class, () -> - PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery))); + expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery))); + expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery))); PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1))); - expectThrows(IllegalArgumentException.class, () -> - PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(hasChildQuery))); + expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(hasChildQuery))); PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder())); - expectThrows(IllegalArgumentException.class, () -> - 
PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(hasChildQuery, new MatchAllQueryBuilder()))); + expectThrows( + IllegalArgumentException.class, + () -> PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(hasChildQuery, new MatchAllQueryBuilder())) + ); PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder())); - expectThrows(IllegalArgumentException.class, () -> - PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(hasChildQuery, new RandomScoreFunctionBuilder()))); + expectThrows( + IllegalArgumentException.class, + () -> PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(hasChildQuery, new RandomScoreFunctionBuilder())) + ); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery)); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery))); @@ -757,12 +919,22 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { } public void testEmptyName() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("").field("type", "percolator").endObject().endObject() - .endObject().endObject()); + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("") + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + ); DocumentMapperParser parser = mapperService.documentMapperParser(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, () -> parser.parse("type1", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); @@ -783,11 +955,21 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { query.endObject(); query.endObject(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) - .endObject()), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + .endObject() + ), + XContentType.JSON + ) + ); BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), writableRegistry())) { @@ -821,19 +1003,29 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { query.endObject(); query.endObject(); - doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) - .endObject()), - XContentType.JSON)); + doc = mapperService.documentMapper("doc") + 
.parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + .endObject() + ), + XContentType.JSON + ) + ); querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), writableRegistry())) { input.readVInt(); input.readVInt(); FunctionScoreQueryBuilder queryBuilder = (FunctionScoreQueryBuilder) input.readNamedWriteable(QueryBuilder.class); - ScriptScoreFunctionBuilder function = (ScriptScoreFunctionBuilder) - queryBuilder.filterFunctionBuilders()[0].getScoreFunction(); + ScriptScoreFunctionBuilder function = (ScriptScoreFunctionBuilder) queryBuilder.filterFunctionBuilders()[0] + .getScoreFunction(); assertEquals(Script.DEFAULT_SCRIPT_LANG, function.getScript().getLang()); } } @@ -904,19 +1096,23 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { public void testDuplicatedClauses() throws Exception { addQueryFieldMappings(); - QueryBuilder qb = boolQuery() - .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) - .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); - ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field(fieldName, qb) - .endObject()), - XContentType.JSON)); + QueryBuilder qb = boolQuery().must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) + .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); + ParsedDocument doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), + XContentType.JSON + ) + ); List values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) - .map(f -> f.binaryValue().utf8ToString()) - .sorted() - .collect(Collectors.toList()); + .map(f -> f.binaryValue().utf8ToString()) + .sorted() + .collect(Collectors.toList()); assertThat(values.size(), equalTo(3)); assertThat(values.get(0), equalTo("field\0value1")); assertThat(values.get(1), equalTo("field\0value2")); @@ -924,21 +1120,25 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { int msm = doc.rootDoc().getFields(fieldType.minimumShouldMatchField.name())[0].numericValue().intValue(); assertThat(msm, equalTo(3)); - qb = boolQuery() - .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) - .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))) - .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) - .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field(fieldName, qb) - .endObject()), - XContentType.JSON)); + qb = boolQuery().must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) + 
.must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))) + .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) + .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); + doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), + XContentType.JSON + ) + ); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) - .map(f -> f.binaryValue().utf8ToString()) - .sorted() - .collect(Collectors.toList()); + .map(f -> f.binaryValue().utf8ToString()) + .sorted() + .collect(Collectors.toList()); assertThat(values.size(), equalTo(5)); assertThat(values.get(0), equalTo("field\0value1")); assertThat(values.get(1), equalTo("field\0value2")); @@ -948,22 +1148,26 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase { msm = doc.rootDoc().getFields(fieldType.minimumShouldMatchField.name())[0].numericValue().intValue(); assertThat(msm, equalTo(4)); - qb = boolQuery() - .minimumShouldMatch(3) - .should(boolQuery().should(termQuery("field", "value1")).should(termQuery("field", "value2"))) - .should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3"))) - .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) - .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject() - .field(fieldName, qb) - .endObject()), - XContentType.JSON)); + qb = boolQuery().minimumShouldMatch(3) + .should(boolQuery().should(termQuery("field", "value1")).should(termQuery("field", "value2"))) + .should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3"))) + .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) + .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); + doc = mapperService.documentMapper("doc") + .parse( + new SourceToParse( + "test", + "doc", + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), + XContentType.JSON + ) + ); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) - .map(f -> f.binaryValue().utf8ToString()) - .sorted() - .collect(Collectors.toList()); + .map(f -> f.binaryValue().utf8ToString()) + .sorted() + .collect(Collectors.toList()); assertThat(values.size(), equalTo(5)); assertThat(values.get(0), equalTo("field\0value1")); assertThat(values.get(1), equalTo("field\0value2")); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index 1f3d111f5ab..83ca9037658 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -58,8 +58,15 @@ import static org.mockito.Mockito.mock; public class PercolatorHighlightSubFetchPhaseTests extends OpenSearchTestCase { public void testHitsExecutionNeeded() { - PercolateQuery percolateQuery = new 
PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); + PercolateQuery percolateQuery = new PercolateQuery( + "_name", + ctx -> null, + Collections.singletonList(new BytesArray("{}")), + new MatchAllDocsQuery(), + Mockito.mock(IndexSearcher.class), + null, + new MatchAllDocsQuery() + ); PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap()); FetchContext fetchContext = mock(FetchContext.class); Mockito.when(fetchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList())); @@ -71,8 +78,15 @@ public class PercolatorHighlightSubFetchPhaseTests extends OpenSearchTestCase { } public void testLocatePercolatorQuery() { - PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); + PercolateQuery percolateQuery = new PercolateQuery( + "_name", + ctx -> null, + Collections.singletonList(new BytesArray("{}")), + new MatchAllDocsQuery(), + Mockito.mock(IndexSearcher.class), + null, + new MatchAllDocsQuery() + ); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()).size(), equalTo(0)); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); @@ -105,16 +119,25 @@ public class PercolatorHighlightSubFetchPhaseTests extends OpenSearchTestCase { assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).size(), equalTo(1)); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).get(0), sameInstance(percolateQuery)); - PercolateQuery percolateQuery2 = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); + PercolateQuery percolateQuery2 = new PercolateQuery( + "_name", + ctx -> null, + Collections.singletonList(new BytesArray("{}")), + new MatchAllDocsQuery(), + Mockito.mock(IndexSearcher.class), + null, + new MatchAllDocsQuery() + ); bq = new BooleanQuery.Builder(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(0)); bq.add(percolateQuery, BooleanClause.Occur.FILTER); bq.add(percolateQuery2, BooleanClause.Occur.FILTER); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(2)); - assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), - containsInAnyOrder(sameInstance(percolateQuery), sameInstance(percolateQuery2))); + assertThat( + PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), + containsInAnyOrder(sameInstance(percolateQuery), sameInstance(percolateQuery2)) + ); assertNotNull(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(null)); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(null).size(), equalTo(0)); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index 4ef7092408b..7eb9f95aa89 100644 --- 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -77,17 +77,20 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends OpenSearchTestCase LeafReaderContext context = reader.leaves().get(0); // A match: { - HitContext hit = new HitContext( - new SearchHit(0), - context, - 0, - new SourceLookup()); + HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup()); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); - PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), - new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); + PercolateQuery percolateQuery = new PercolateQuery( + "_name", + queryStore, + Collections.emptyList(), + new MatchAllDocsQuery(), + memoryIndex.createSearcher(), + null, + new MatchNoDocsQuery() + ); FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); @@ -102,17 +105,20 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends OpenSearchTestCase // No match: { - HitContext hit = new HitContext( - new SearchHit(0), - context, - 0, - new SourceLookup()); + HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup()); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); - PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), - new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); + PercolateQuery percolateQuery = new PercolateQuery( + "_name", + queryStore, + Collections.emptyList(), + new MatchAllDocsQuery(), + memoryIndex.createSearcher(), + null, + new MatchNoDocsQuery() + ); FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); @@ -126,17 +132,20 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends OpenSearchTestCase // No query: { - HitContext hit = new HitContext( - new SearchHit(0), - context, - 0, - new SourceLookup()); + HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup()); PercolateQuery.QueryStore queryStore = ctx -> docId -> null; MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); - PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), - new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); + PercolateQuery percolateQuery = new PercolateQuery( + "_name", + queryStore, + Collections.emptyList(), + new MatchAllDocsQuery(), + memoryIndex.createSearcher(), + null, + new MatchNoDocsQuery() + ); FetchContext sc = mock(FetchContext.class); when(sc.query()).thenReturn(percolateQuery); diff --git 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java index 7f85c7dd796..fa7727d46ce 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java @@ -98,13 +98,25 @@ public class PercolatorQuerySearchTests extends OpenSearchSingleNodeTestCase { public void testPercolateScriptQuery() throws IOException { client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get(); client().prepareIndex("index", "type", "1") - .setSource(jsonBuilder().startObject().field("query", QueryBuilders.scriptQuery( - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1==1", Collections.emptyMap()))).endObject()) + .setSource( + jsonBuilder().startObject() + .field( + "query", + QueryBuilders.scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1==1", Collections.emptyMap())) + ) + .endObject() + ) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .execute().actionGet(); + .execute() + .actionGet(); SearchResponse response = client().prepareSearch("index") - .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), - XContentType.JSON)) + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), + XContentType.JSON + ) + ) .get(); assertHitCount(response, 1); assertSearchHits(response, "1"); @@ -112,33 +124,71 @@ public class PercolatorQuerySearchTests extends OpenSearchSingleNodeTestCase { public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject() - .startObject("query").field("type", "percolator").endObject() - .startObject("employee").field("type", "nested").startObject("properties") - .startObject("name").field("type", "text").endObject().endObject().endObject().endObject() + mapping.startObject() + .startObject("properties") + .startObject("companyname") + .field("type", "text") + .endObject() + .startObject("query") + .field("type", "percolator") + .endObject() + .startObject("employee") + .field("type", "nested") + .startObject("properties") + .startObject("name") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() .endObject(); - createIndex("test", client().admin().indices().prepareCreate("test") - // to avoid normal document from being cached by BitsetFilterCache - .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false)) - .addMapping("employee", mapping) + createIndex( + "test", + client().admin() + .indices() + .prepareCreate("test") + // to avoid normal document from being cached by BitsetFilterCache + .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false)) + .addMapping("employee", mapping) ); - client().prepareIndex("test", "employee", "q1").setSource(jsonBuilder().startObject() - .field("query", QueryBuilders.nestedQuery("employee", - matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg) - ).endObject()) + 
client().prepareIndex("test", "employee", "q1") + .setSource( + jsonBuilder().startObject() + .field( + "query", + QueryBuilders.nestedQuery( + "employee", + matchQuery("employee.name", "virginia potts").operator(Operator.AND), + ScoreMode.Avg + ) + ) + .endObject() + ) .get(); client().admin().indices().prepareRefresh().get(); for (int i = 0; i < 32; i++) { SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject().field("companyname", "stark") - .startArray("employee") - .startObject().field("name", "virginia potts").endObject() - .startObject().field("name", "tony stark").endObject() - .endArray() - .endObject()), XContentType.JSON)) + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "virginia potts") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ) .addSort("_doc", SortOrder.ASC) // size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...) .setSize(0) @@ -149,8 +199,7 @@ public class PercolatorQuerySearchTests extends OpenSearchSingleNodeTestCase { // We can't check via api... because BitsetCacheListener requires that it can extract shardId from index reader // and for percolator it can't do that, but that means we don't keep track of // memory for BitsetCache in case of percolator - long bitsetSize = client().admin().cluster().prepareClusterStats().get() - .getIndicesStats().getSegments().getBitsetMemoryInBytes(); + long bitsetSize = client().admin().cluster().prepareClusterStats().get().getIndicesStats().getSegments().getBitsetMemoryInBytes(); assertEquals("The percolator works with in-memory index and therefor shouldn't use bitset cache", 0L, bitsetSize); } @@ -187,14 +236,15 @@ public class PercolatorQuerySearchTests extends OpenSearchSingleNodeTestCase { mapping.endObject(); } mapping.endObject(); - createIndex("test", client().admin().indices().prepareCreate("test") - .addMapping("employee", mapping) - ); + createIndex("test", client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "use_fielddata_please", Collections.emptyMap()); - client().prepareIndex("test", "employee", "q1").setSource(jsonBuilder().startObject() - .field("query", QueryBuilders.nestedQuery("employees", - QueryBuilders.scriptQuery(script), ScoreMode.Avg) - ).endObject()).get(); + client().prepareIndex("test", "employee", "q1") + .setSource( + jsonBuilder().startObject() + .field("query", QueryBuilders.nestedQuery("employees", QueryBuilders.scriptQuery(script), ScoreMode.Avg)) + .endObject() + ) + .get(); client().admin().indices().prepareRefresh().get(); XContentBuilder doc = jsonBuilder(); doc.startObject(); @@ -222,70 +272,81 @@ public class PercolatorQuerySearchTests extends OpenSearchSingleNodeTestCase { assertHitCount(response, 1); } - long fieldDataSize = client().admin().cluster().prepareClusterStats().get() - .getIndicesStats().getFieldData().getMemorySizeInBytes(); + long fieldDataSize = client().admin().cluster().prepareClusterStats().get().getIndicesStats().getFieldData().getMemorySizeInBytes(); assertEquals("The percolator works with in-memory index and therefor shouldn't use field-data cache", 0L, 
fieldDataSize); } public void testMapUnmappedFieldAsText() throws IOException { - Settings.Builder settings = Settings.builder() - .put("index.percolator.map_unmapped_fields_as_text", true); + Settings.Builder settings = Settings.builder().put("index.percolator.map_unmapped_fields_as_text", true); createIndex("test", settings.build(), "query", "query", "type=percolator"); client().prepareIndex("test", "query", "1") - .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()).get(); + .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) + .get(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), - XContentType.JSON)) + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + XContentType.JSON + ) + ) .get(); assertHitCount(response, 1); assertSearchHits(response, "1"); } public void testRangeQueriesWithNow() throws Exception { - IndexService indexService = createIndex("test", Settings.builder().put("index.number_of_shards", 1).build(), "_doc", - "field1", "type=keyword", "field2", "type=date", "query", "type=percolator"); + IndexService indexService = createIndex( + "test", + Settings.builder().put("index.number_of_shards", 1).build(), + "_doc", + "field1", + "type=keyword", + "field2", + "type=date", + "query", + "type=percolator" + ); client().prepareIndex("test", "_doc", "1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from("now-1h").to("now+1h")).endObject()) .get(); client().prepareIndex("test", "_doc", "2") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .filter(termQuery("field1", "value")) - .filter(rangeQuery("field2").from("now-1h").to("now+1h")) - ).endObject()) + .setSource( + jsonBuilder().startObject() + .field( + "query", + boolQuery().filter(termQuery("field1", "value")).filter(rangeQuery("field2").from("now-1h").to("now+1h")) + ) + .endObject() + ) .get(); - Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "1==1", Collections.emptyMap()); client().prepareIndex("test", "_doc", "3") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .filter(scriptQuery(script)) - .filter(rangeQuery("field2").from("now-1h").to("now+1h")) - ).endObject()) + .setSource( + jsonBuilder().startObject() + .field("query", boolQuery().filter(scriptQuery(script)).filter(rangeQuery("field2").from("now-1h").to("now+1h"))) + .endObject() + ) .get(); client().admin().indices().prepareRefresh().get(); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { - long[] currentTime = new long[] {System.currentTimeMillis()}; - QueryShardContext queryShardContext = - indexService.newQueryShardContext(0, searcher, () -> currentTime[0], null); + long[] currentTime = new long[] { System.currentTimeMillis() }; + QueryShardContext queryShardContext = indexService.newQueryShardContext(0, searcher, () -> currentTime[0], null); - BytesReference source = BytesReference.bytes(jsonBuilder().startObject() - .field("field1", "value") - .field("field2", currentTime[0]) - .endObject()); + BytesReference source = BytesReference.bytes( + jsonBuilder().startObject().field("field1", "value").field("field2", currentTime[0]).endObject() + ); QueryBuilder 
queryBuilder = new PercolateQueryBuilder("query", source, XContentType.JSON); Query query = queryBuilder.toQuery(queryShardContext); assertThat(searcher.count(query), equalTo(3)); currentTime[0] = currentTime[0] + 10800000; // + 3 hours - source = BytesReference.bytes(jsonBuilder().startObject() - .field("field1", "value") - .field("field2", currentTime[0]) - .endObject()); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", currentTime[0]).endObject()); queryBuilder = new PercolateQueryBuilder("query", source, XContentType.JSON); query = queryBuilder.toQuery(queryShardContext); assertThat(searcher.count(query), equalTo(3)); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java index ecb44e90674..11f04377752 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java @@ -139,11 +139,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { } public void testExtractQueryMetadata_multiPhraseQuery() { - MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() - .add(new Term("_field", "_term1")) - .add(new Term[] {new Term("_field", "_term2"), new Term("_field", "_term3")}) - .add(new Term[] {new Term("_field", "_term4"), new Term("_field", "_term5")}) - .add(new Term[] {new Term("_field", "_term6")}) + MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder().add(new Term("_field", "_term1")) + .add(new Term[] { new Term("_field", "_term2"), new Term("_field", "_term3") }) + .add(new Term[] { new Term("_field", "_term4"), new Term("_field", "_term5") }) + .add(new Term[] { new Term("_field", "_term6") }) .build(); Result result = analyze(multiPhraseQuery, Version.CURRENT); assertThat(result.verified, is(false)); @@ -166,11 +165,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { } public void testExtractQueryMetadata_multiPhraseQuery_pre6dot1() { - MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() - .add(new Term("_field", "_long_term")) - .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_term")}) - .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")}) - .add(new Term[] {new Term("_field", "_very_long_term")}) + MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder().add(new Term("_field", "_long_term")) + .add(new Term[] { new Term("_field", "_long_term"), new Term("_field", "_term") }) + .add(new Term[] { new Term("_field", "_long_term"), new Term("_field", "_very_long_term") }) + .add(new Term[] { new Term("_field", "_very_long_term") }) .build(); Result result = analyze(multiPhraseQuery, LegacyESVersion.V_6_0_0); assertThat(result.verified, is(false)); @@ -182,9 +180,8 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { } public void testExtractQueryMetadata_multiPhraseQuery_dups() { - MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() - .add(new Term("_field", "_term1")) - .add(new Term[] {new Term("_field", "_term1"), new Term("_field", "_term2")}) + MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder().add(new Term("_field", "_term1")) + .add(new Term[] { new Term("_field", "_term1"), new Term("_field", "_term2") }) .build(); Result result = analyze(multiPhraseQuery, Version.CURRENT); @@ -194,7 +191,6 @@ public class 
QueryAnalyzerTests extends OpenSearchTestCase { assertEquals(1, result.minimumShouldMatch); // because of the dup term } - public void testExtractQueryMetadata_booleanQuery() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "term0")); @@ -276,13 +272,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(result.minimumShouldMatch, equalTo(2)); assertTermsEqual(result.extractions, term1, term2, term3); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.SHOULD) - .add(termQuery2, Occur.SHOULD) - .build(), Occur.SHOULD) - .add(termQuery3, Occur.SHOULD) - .setMinimumNumberShouldMatch(2); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.SHOULD).add(termQuery2, Occur.SHOULD).build(), + Occur.SHOULD + ).add(termQuery3, Occur.SHOULD).setMinimumNumberShouldMatch(2); booleanQuery = builder.build(); result = analyze(booleanQuery, Version.CURRENT); assertThat(result.verified, is(false)); @@ -291,15 +284,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { Term term4 = new Term("_field", "_term4"); TermQuery termQuery4 = new TermQuery(term4); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.MUST) - .add(termQuery2, Occur.FILTER) - .build(), Occur.SHOULD) - .add(new BooleanQuery.Builder() - .add(termQuery3, Occur.MUST) - .add(termQuery4, Occur.FILTER) - .build(), Occur.SHOULD); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.MUST).add(termQuery2, Occur.FILTER).build(), + Occur.SHOULD + ).add(new BooleanQuery.Builder().add(termQuery3, Occur.MUST).add(termQuery4, Occur.FILTER).build(), Occur.SHOULD); booleanQuery = builder.build(); result = analyze(booleanQuery, Version.CURRENT); assertThat(result.verified, is(false)); @@ -329,13 +317,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(result.minimumShouldMatch, equalTo(5)); assertTermsEqual(result.extractions, term1, term2, term3, term4, term5); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.SHOULD) - .add(termQuery2, Occur.SHOULD) - .build(), Occur.SHOULD) - .add(new BooleanQuery.Builder().setMinimumNumberShouldMatch(1).build(), Occur.SHOULD) - .setMinimumNumberShouldMatch(2); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.SHOULD).add(termQuery2, Occur.SHOULD).build(), + Occur.SHOULD + ).add(new BooleanQuery.Builder().setMinimumNumberShouldMatch(1).build(), Occur.SHOULD).setMinimumNumberShouldMatch(2); booleanQuery = builder.build(); result = analyze(booleanQuery, Version.CURRENT); // ideally it would return no extractions, but the fact @@ -497,42 +482,34 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat("Prohibited clause, so candidate matches are not verified", result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.FILTER) - .add(termQuery2, Occur.FILTER) - .build(), Occur.SHOULD) - .add(termQuery3, Occur.SHOULD); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.FILTER).add(termQuery2, Occur.FILTER).build(), + Occur.SHOULD + ).add(termQuery3, Occur.SHOULD); result = analyze(builder.build(), Version.CURRENT); assertThat("Inner clause that is not a 
pure disjunction, so candidate matches are not verified", result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.SHOULD) - .add(termQuery2, Occur.SHOULD) - .build(), Occur.SHOULD) - .add(termQuery3, Occur.SHOULD); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.SHOULD).add(termQuery2, Occur.SHOULD).build(), + Occur.SHOULD + ).add(termQuery3, Occur.SHOULD); result = analyze(builder.build(), Version.CURRENT); assertThat("Inner clause that is a pure disjunction, so candidate matches are verified", result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.SHOULD) - .add(termQuery2, Occur.SHOULD) - .build(), Occur.MUST) - .add(termQuery3, Occur.FILTER); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.SHOULD).add(termQuery2, Occur.SHOULD).build(), + Occur.MUST + ).add(termQuery3, Occur.FILTER); result = analyze(builder.build(), Version.CURRENT); assertThat("Disjunctions of conjunctions can't be verified", result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(2)); - builder = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(termQuery1, Occur.MUST) - .add(termQuery2, Occur.FILTER) - .build(), Occur.SHOULD) - .add(termQuery3, Occur.SHOULD); + builder = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(termQuery1, Occur.MUST).add(termQuery2, Occur.FILTER).build(), + Occur.SHOULD + ).add(termQuery3, Occur.SHOULD); result = analyze(builder.build(), Version.CURRENT); assertThat("Conjunctions of disjunctions can't be verified", result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); @@ -629,7 +606,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { } public void testExtractQueryMetadata_blendedTermQuery() { - Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; + Term[] termsArr = new Term[] { new Term("_field", "_term1"), new Term("_field", "_term2") }; BlendedTermQuery commonTermsQuery = BlendedTermQuery.dismaxBlendedQuery(termsArr, 1.0f); Result result = analyze(commonTermsQuery, Version.CURRENT); assertThat(result.verified, is(true)); @@ -665,8 +642,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testExtractQueryMetadata_spanNearQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); - SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) - .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); + SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true).addClause(spanTermQuery1).addClause(spanTermQuery2).build(); Result result = analyze(spanNearQuery, Version.CURRENT); assertThat(result.verified, is(false)); @@ -677,8 +653,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testExtractQueryMetadata_spanNearQuery_pre6dot1() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); - SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) - 
.addClause(spanTermQuery1).addClause(spanTermQuery2).build(); + SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true).addClause(spanTermQuery1).addClause(spanTermQuery2).build(); Result result = analyze(spanNearQuery, LegacyESVersion.V_6_0_0); assertThat(result.verified, is(false)); @@ -892,7 +867,8 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3")); TermQuery termQuery4 = new TermQuery(new Term("_field", "_term4")); DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery( - Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f + Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), + 0.1f ); Result result = analyze(disjunctionMaxQuery, Version.CURRENT); @@ -911,7 +887,8 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes())); disjunctionMaxQuery = new DisjunctionMaxQuery( - Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f + Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), + 0.1f ); result = analyze(disjunctionMaxQuery, Version.CURRENT); @@ -952,8 +929,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("_field", "_value")); - functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null), - CombineFunction.MULTIPLY, 1f, 10f); + functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null), CombineFunction.MULTIPLY, 1f, 10f); result = analyze(functionScoreQuery, Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); @@ -969,8 +945,13 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(result.matchAllDocs, is(true)); assertThat(result.extractions.isEmpty(), is(true)); - FunctionScoreQuery functionScoreQuery2 = - new FunctionScoreQuery(innerQuery, new RandomScoreFunction(0, 0, null), CombineFunction.MULTIPLY, 1f, 10f); + FunctionScoreQuery functionScoreQuery2 = new FunctionScoreQuery( + innerQuery, + new RandomScoreFunction(0, 0, null), + CombineFunction.MULTIPLY, + 1f, + 10f + ); result = analyze(functionScoreQuery2, Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(0)); @@ -987,94 +968,97 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertSame(queryTerms2, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{1, 2, 3}); + queryTerms1 = terms(new int[] { 1, 2, 3 }); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}); + queryTerms2 = terms(new int[] { 2, 3, 4 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms1, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{4, 5, 6}); + queryTerms1 = terms(new int[] { 4, 5, 6 }); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1, 2, 3}); + queryTerms2 = terms(new int[] { 1, 2, 3 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms2, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); + queryTerms1 = terms(new int[] { 1, 2, 3 }, "123", "456"); result1 = 
new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456"); + queryTerms2 = terms(new int[] { 2, 3, 4 }, "123", "456"); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms1, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{10}); + queryTerms1 = terms(new int[] { 10 }); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}); + queryTerms2 = terms(new int[] { 1 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms2, result.extractions); - queryTerms1 = terms(new int[]{10}, "123"); + queryTerms1 = terms(new int[] { 10 }, "123"); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}); + queryTerms2 = terms(new int[] { 1 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms1, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{10}, "1", "123"); + queryTerms1 = terms(new int[] { 10 }, "1", "123"); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}, "1", "2"); + queryTerms2 = terms(new int[] { 1 }, "1", "2"); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame(queryTerms1, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); + queryTerms1 = terms(new int[] { 1, 2, 3 }, "123", "456"); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456"); + queryTerms2 = terms(new int[] { 2, 3, 4 }, "1", "456"); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); - assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", - queryTerms1, result.extractions); + assertSame( + "Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", + queryTerms1, + result.extractions + ); assertFalse(result.verified); - queryTerms1 = terms(new int[]{}); + queryTerms1 = terms(new int[] {}); result1 = new Result(false, queryTerms1, 0); - queryTerms2 = terms(new int[]{}); + queryTerms2 = terms(new int[] {}); result2 = new Result(false, queryTerms2, 0); result = selectBestResult(result1, result2); assertSame("In case query extractions are empty", queryTerms2, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{1}); + queryTerms1 = terms(new int[] { 1 }); result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{}); + queryTerms2 = terms(new int[] {}); result2 = new Result(false, queryTerms2, 0); result = selectBestResult(result1, result2); assertSame("In case query a single extraction is empty", queryTerms1, result.extractions); assertFalse(result.verified); - queryTerms1 = terms(new int[]{}); + queryTerms1 = terms(new int[] {}); result1 = new Result(false, queryTerms1, 0); - queryTerms2 = terms(new int[]{1}); + queryTerms2 = terms(new int[] { 1 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame("In case query a single extraction is empty", queryTerms2, result.extractions); assertFalse(result.verified); result1 = new Result(true, true); - queryTerms2 = terms(new int[]{1}); + queryTerms2 = terms(new int[] { 1 }); result2 = new Result(true, queryTerms2, 1); result = selectBestResult(result1, result2); assertSame("Conjunction with a 
match_all", result2, result); assertTrue(result.verified); - queryTerms1 = terms(new int[]{1}); + queryTerms1 = terms(new int[] { 1 }); result1 = new Result(true, queryTerms2, 1); result2 = new Result(true, true); result = selectBestResult(result1, result2); @@ -1169,8 +1153,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertDimension(ranges.get(0).range.lowerPoint, bytes -> DoublePoint.encodeDimension(10D, bytes, 0)); assertDimension(ranges.get(0).range.upperPoint, bytes -> DoublePoint.encodeDimension(20D, bytes, 0)); - query = InetAddressPoint.newRangeQuery("_field", InetAddresses.forString("192.168.1.0"), - InetAddresses.forString("192.168.1.255")); + query = InetAddressPoint.newRangeQuery("_field", InetAddresses.forString("192.168.1.0"), InetAddresses.forString("192.168.1.255")); result = analyze(query, Version.CURRENT); assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); @@ -1187,7 +1170,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { Query query1 = LatLonPoint.newBoxQuery("_field", 0, 1, 0, 1); assertEquals(Result.UNKNOWN, analyze(query1, Version.CURRENT)); - Query query2 = LongPoint.newRangeQuery("_field", new long[]{0, 0, 0}, new long[]{1, 1, 1}); + Query query2 = LongPoint.newRangeQuery("_field", new long[] { 0, 0, 0 }, new long[] { 1, 1, 1 }); assertEquals(Result.UNKNOWN, analyze(query2, Version.CURRENT)); } @@ -1200,8 +1183,10 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { } public void testIndexOrDocValuesQuery() { - Query query = new IndexOrDocValuesQuery(IntPoint.newRangeQuery("_field", 10, 20), - SortedNumericDocValuesField.newSlowRangeQuery("_field", 10, 20)); + Query query = new IndexOrDocValuesQuery( + IntPoint.newRangeQuery("_field", 10, 20), + SortedNumericDocValuesField.newSlowRangeQuery("_field", 10, 20) + ); Result result = analyze(query, Version.CURRENT); assertFalse(result.verified); assertThat(result.minimumShouldMatch, equalTo(1)); @@ -1216,8 +1201,12 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testToParentBlockJoinQuery() { TermQuery termQuery = new TermQuery(new Term("field", "value")); QueryBitSetProducer queryBitSetProducer = new QueryBitSetProducer(new TermQuery(new Term("_type", "child"))); - OpenSearchToParentBlockJoinQuery query = - new OpenSearchToParentBlockJoinQuery(termQuery, queryBitSetProducer, ScoreMode.None, "child"); + OpenSearchToParentBlockJoinQuery query = new OpenSearchToParentBlockJoinQuery( + termQuery, + queryBitSetProducer, + ScoreMode.None, + "child" + ); Result result = analyze(query, Version.CURRENT); assertFalse(result.verified); assertThat(result.minimumShouldMatch, equalTo(1)); @@ -1328,61 +1317,65 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testExtractQueryMetadata_duplicatedClauses() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) - .build(), - BooleanClause.Occur.MUST + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.MUST ); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) 
- .build(), - BooleanClause.Occur.MUST + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.MUST ); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST) - .build(), - BooleanClause.Occur.MUST + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.MUST ); Result result = analyze(builder.build(), Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.matchAllDocs, is(false)); assertThat(result.minimumShouldMatch, equalTo(4)); - assertTermsEqual(result.extractions, new Term("field", "value1"), new Term("field", "value2"), - new Term("field", "value3"), new Term("field", "value4")); + assertTermsEqual( + result.extractions, + new Term("field", "value1"), + new Term("field", "value2"), + new Term("field", "value3"), + new Term("field", "value4") + ); builder = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) - .build(), - BooleanClause.Occur.SHOULD + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.SHOULD ); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) - .build(), - BooleanClause.Occur.SHOULD + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.SHOULD ); builder.add( - new BooleanQuery.Builder() - .add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST) - .build(), - BooleanClause.Occur.SHOULD + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST) + .add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST) + .build(), + BooleanClause.Occur.SHOULD ); result = analyze(builder.build(), Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.matchAllDocs, is(false)); assertThat(result.minimumShouldMatch, equalTo(2)); - assertTermsEqual(result.extractions, new Term("field", "value1"), new Term("field", "value2"), - new Term("field", "value3"), new Term("field", "value4")); + assertTermsEqual( + result.extractions, + new Term("field", "value1"), + new Term("field", "value2"), + new Term("field", "value3"), + new Term("field", "value4") + ); } public void testEmptyQueries() { @@ -1425,14 +1418,22 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "term1"), new Term("field", "term2")); - source = Intervals.ordered(Intervals.term("term1"), Intervals.term("term2"), - 
Intervals.or(Intervals.term("term3"), Intervals.term("term4"))); + source = Intervals.ordered( + Intervals.term("term1"), + Intervals.term("term2"), + Intervals.or(Intervals.term("term3"), Intervals.term("term4")) + ); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.matchAllDocs, is(false)); assertThat(result.minimumShouldMatch, equalTo(3)); - assertTermsEqual(result.extractions, new Term("field", "term1"), new Term("field", "term2"), - new Term("field", "term3"), new Term("field", "term4")); + assertTermsEqual( + result.extractions, + new Term("field", "term1"), + new Term("field", "term2"), + new Term("field", "term3"), + new Term("field", "term4") + ); source = Intervals.ordered(Intervals.term("term1"), Intervals.wildcard(new BytesRef("a*"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); @@ -1523,11 +1524,8 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testRangeAndTermWithNestedMSM() { - Query q1 = new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "v3")), Occur.SHOULD) - .add(new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "n1")), Occur.SHOULD) - .build(), Occur.SHOULD) + Query q1 = new BooleanQuery.Builder().add(new TermQuery(new Term("f", "v3")), Occur.SHOULD) + .add(new BooleanQuery.Builder().add(new TermQuery(new Term("f", "n1")), Occur.SHOULD).build(), Occur.SHOULD) .add(new TermQuery(new Term("f", "v4")), Occur.SHOULD) .setMinimumNumberShouldMatch(2) .build(); @@ -1538,15 +1536,12 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(r1.matchAllDocs); assertTrue(r1.verified); - Query q = new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 0, 10), Occur.FILTER) + Query q = new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 0, 10), Occur.FILTER) .add(new TermQuery(new Term("f", "v1")), Occur.MUST) .add(new TermQuery(new Term("f", "v2")), Occur.MUST) .add(IntPoint.newRangeQuery("i", 2, 20), Occur.FILTER) .add(new TermQuery(new Term("f", "v3")), Occur.SHOULD) - .add(new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "n1")), Occur.SHOULD) - .build(), Occur.SHOULD) + .add(new BooleanQuery.Builder().add(new TermQuery(new Term("f", "n1")), Occur.SHOULD).build(), Occur.SHOULD) .add(new TermQuery(new Term("f", "v4")), Occur.SHOULD) .setMinimumNumberShouldMatch(2) .build(); @@ -1560,8 +1555,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { public void testCombinedRangeAndTermWithMinimumShouldMatch() { - Query disj = new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 0, 10), Occur.SHOULD) + Query disj = new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 0, 10), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) .setMinimumNumberShouldMatch(2) @@ -1573,8 +1567,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(r.matchAllDocs); assertFalse(r.verified); - Query q = new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 0, 10), Occur.SHOULD) + Query q = new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 0, 10), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.FILTER) @@ -1587,10 +1580,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.verified); assertFalse(result.matchAllDocs); - q = 
new BooleanQuery.Builder() - .add(q, Occur.MUST) - .add(q, Occur.MUST) - .build(); + q = new BooleanQuery.Builder().add(q, Occur.MUST).add(q, Occur.MUST).build(); result = analyze(q, Version.CURRENT); assertThat(result.minimumShouldMatch, equalTo(1)); @@ -1598,8 +1588,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.verified); assertFalse(result.matchAllDocs); - Query q2 = new BooleanQuery.Builder() - .add(new TermQuery(new Term("f", "v1")), Occur.FILTER) + Query q2 = new BooleanQuery.Builder().add(new TermQuery(new Term("f", "v1")), Occur.FILTER) .add(IntPoint.newRangeQuery("i", 15, 20), Occur.SHOULD) .add(new TermQuery(new Term("f", "v2")), Occur.SHOULD) .add(new TermQuery(new Term("f", "v2")), Occur.MUST) @@ -1613,8 +1602,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.matchAllDocs); // multiple range queries on different fields - Query q3 = new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 15, 20), Occur.SHOULD) + Query q3 = new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 15, 20), Occur.SHOULD) .add(IntPoint.newRangeQuery("i2", 15, 20), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) .add(new TermQuery(new Term("f", "v2")), Occur.MUST) @@ -1627,8 +1615,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.matchAllDocs); // multiple disjoint range queries on the same field - Query q4 = new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 15, 20), Occur.SHOULD) + Query q4 = new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 15, 20), Occur.SHOULD) .add(IntPoint.newRangeQuery("i", 25, 30), Occur.SHOULD) .add(IntPoint.newRangeQuery("i", 35, 40), Occur.SHOULD) .add(new TermQuery(new Term("f", "v1")), Occur.SHOULD) @@ -1642,14 +1629,12 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.matchAllDocs); // multiple conjunction range queries on the same field - Query q5 = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 15, 20), Occur.MUST) + Query q5 = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 15, 20), Occur.MUST) .add(IntPoint.newRangeQuery("i", 25, 30), Occur.MUST) - .build(), Occur.MUST) - .add(IntPoint.newRangeQuery("i", 35, 40), Occur.MUST) - .add(new TermQuery(new Term("f", "v2")), Occur.MUST) - .build(); + .build(), + Occur.MUST + ).add(IntPoint.newRangeQuery("i", 35, 40), Occur.MUST).add(new TermQuery(new Term("f", "v2")), Occur.MUST).build(); result = analyze(q5, Version.CURRENT); assertThat(result.minimumShouldMatch, equalTo(2)); assertThat(result.extractions, hasSize(4)); @@ -1657,14 +1642,12 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.matchAllDocs); // multiple conjunction range queries on different fields - Query q6 = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 15, 20), Occur.MUST) + Query q6 = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 15, 20), Occur.MUST) .add(IntPoint.newRangeQuery("i2", 25, 30), Occur.MUST) - .build(), Occur.MUST) - .add(IntPoint.newRangeQuery("i", 35, 40), Occur.MUST) - .add(new TermQuery(new Term("f", "v2")), Occur.MUST) - .build(); + .build(), + Occur.MUST + ).add(IntPoint.newRangeQuery("i", 35, 40), Occur.MUST).add(new TermQuery(new Term("f", "v2")), Occur.MUST).build(); result = analyze(q6, Version.CURRENT); 
assertThat(result.minimumShouldMatch, equalTo(3)); assertThat(result.extractions, hasSize(4)); @@ -1672,15 +1655,18 @@ public class QueryAnalyzerTests extends OpenSearchTestCase { assertFalse(result.matchAllDocs); // mixed term and range conjunctions - Query q7 = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 1, 2), Occur.MUST) + Query q7 = new BooleanQuery.Builder().add( + new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 1, 2), Occur.MUST) .add(new TermQuery(new Term("f", "1")), Occur.MUST) - .build(), Occur.MUST) - .add(new BooleanQuery.Builder() - .add(IntPoint.newRangeQuery("i", 1, 2), Occur.MUST) - .add(new TermQuery(new Term("f", "2")), Occur.MUST) - .build(), Occur.MUST) + .build(), + Occur.MUST + ) + .add( + new BooleanQuery.Builder().add(IntPoint.newRangeQuery("i", 1, 2), Occur.MUST) + .add(new TermQuery(new Term("f", "2")), Occur.MUST) + .build(), + Occur.MUST + ) .build(); result = analyze(q7, Version.CURRENT); assertThat(result.minimumShouldMatch, equalTo(3)); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/QueryBuilderStoreTests.java index e95bf011596..f37a7958d7d 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/QueryBuilderStoreTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/QueryBuilderStoreTests.java @@ -88,7 +88,8 @@ public class QueryBuilderStoreTests extends OpenSearchTestCase { config.setMergePolicy(NoMergePolicy.INSTANCE); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder( - new Mapper.BuilderContext(settings, new ContentPath(0))); + new Mapper.BuilderContext(settings, new ContentPath(0)) + ); Version version = LegacyESVersion.V_6_0_0_beta2; try (IndexWriter indexWriter = new IndexWriter(directory, config)) { @@ -97,8 +98,7 @@ public class QueryBuilderStoreTests extends OpenSearchTestCase { ParseContext parseContext = mock(ParseContext.class); ParseContext.Document document = new ParseContext.Document(); when(parseContext.doc()).thenReturn(document); - PercolatorFieldMapper.createQueryBuilderField(version, - fieldMapper, queryBuilders[i], parseContext); + PercolatorFieldMapper.createQueryBuilderField(version, fieldMapper, queryBuilders[i], parseContext); indexWriter.addDocument(document); } } @@ -107,8 +107,9 @@ public class QueryBuilderStoreTests extends OpenSearchTestCase { when(queryShardContext.indexVersionCreated()).thenReturn(version); when(queryShardContext.getWriteableRegistry()).thenReturn(writableRegistry()); when(queryShardContext.getXContentRegistry()).thenReturn(xContentRegistry()); - when(queryShardContext.getForField(fieldMapper.fieldType())) - .thenReturn(new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.BYTES)); + when(queryShardContext.getForField(fieldMapper.fieldType())).thenReturn( + new BytesBinaryIndexFieldData(fieldMapper.name(), CoreValuesSourceType.BYTES) + ); when(queryShardContext.fieldMapper(Mockito.anyString())).thenAnswer(invocation -> { final String fieldName = (String) invocation.getArguments()[0]; return new KeywordFieldMapper.KeywordFieldType(fieldName); diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java 
b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index b97bc5136cf..9c5c2c146d1 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -80,7 +80,8 @@ public class RankEvalRequestIT extends OpenSearchIntegTestCase { ensureGreen(); client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "1") - .setSource("id", 1, "text", "berlin", "title", "Berlin, Germany", "population", 3670622).get(); + .setSource("id", 1, "text", "berlin", "title", "Berlin, Germany", "population", 3670622) + .get(); client().prepareIndex(TEST_INDEX, MapperService.SINGLE_MAPPING_NAME, "2") .setSource("id", 2, "text", "amsterdam", "population", 851573) .get(); @@ -117,26 +118,22 @@ public class RankEvalRequestIT extends OpenSearchIntegTestCase { SearchSourceBuilder testQuery = new SearchSourceBuilder(); testQuery.query(new MatchAllQueryBuilder()); testQuery.sort("id"); - RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", - createRelevant("2", "3", "4", "5"), testQuery); + RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", createRelevant("2", "3", "4", "5"), testQuery); amsterdamRequest.addSummaryFields(Arrays.asList(new String[] { "text", "title" })); specifications.add(amsterdamRequest); - RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("1"), - testQuery); + RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("1"), testQuery); berlinRequest.addSummaryFields(Arrays.asList(new String[] { "text", "title" })); specifications.add(berlinRequest); PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), - RankEvalAction.INSTANCE, new RankEvalRequest()); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest()); builder.setRankEvalSpec(task); String indexToUse = randomBoolean() ? 
TEST_INDEX : INDEX_ALIAS; - RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().indices(indexToUse)) - .actionGet(); + RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().indices(indexToUse)).actionGet(); // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the // second is 1/6, divided by 2 to get the average double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0; @@ -197,19 +194,23 @@ public class RankEvalRequestIT extends OpenSearchIntegTestCase { List specifications = new ArrayList<>(); List ratedDocs = Arrays.asList( - new RatedDocument(TEST_INDEX, "1", 3), - new RatedDocument(TEST_INDEX, "2", 2), - new RatedDocument(TEST_INDEX, "3", 3), - new RatedDocument(TEST_INDEX, "4", 0), - new RatedDocument(TEST_INDEX, "5", 1), - new RatedDocument(TEST_INDEX, "6", 2)); + new RatedDocument(TEST_INDEX, "1", 3), + new RatedDocument(TEST_INDEX, "2", 2), + new RatedDocument(TEST_INDEX, "3", 3), + new RatedDocument(TEST_INDEX, "4", 0), + new RatedDocument(TEST_INDEX, "5", 1), + new RatedDocument(TEST_INDEX, "6", 2) + ); specifications.add(new RatedRequest("amsterdam_query", ratedDocs, testQuery)); DiscountedCumulativeGain metric = new DiscountedCumulativeGain(false, null, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { TEST_INDEX })); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder( + client(), + RankEvalAction.INSTANCE, + new RankEvalRequest(task, new String[] { TEST_INDEX }) + ); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getMetricScore(), 10E-14); @@ -236,8 +237,11 @@ public class RankEvalRequestIT extends OpenSearchIntegTestCase { MeanReciprocalRank metric = new MeanReciprocalRank(1, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { TEST_INDEX })); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder( + client(), + RankEvalAction.INSTANCE, + new RankEvalRequest(task, new String[] { TEST_INDEX }) + ); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); // the expected reciprocal rank for the amsterdam_query is 1/5 @@ -268,20 +272,21 @@ public class RankEvalRequestIT extends OpenSearchIntegTestCase { List specifications = new ArrayList<>(); SearchSourceBuilder amsterdamQuery = new SearchSourceBuilder(); amsterdamQuery.query(new MatchAllQueryBuilder()); - RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", - createRelevant("2", "3", "4", "5"), amsterdamQuery); + RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", createRelevant("2", "3", "4", "5"), amsterdamQuery); specifications.add(amsterdamRequest); SearchSourceBuilder brokenQuery = new SearchSourceBuilder(); brokenQuery.query(QueryBuilders.termQuery("population", "noStringOnNumericFields")); - RatedRequest brokenRequest = new RatedRequest("broken_query", createRelevant("1"), - brokenQuery); + RatedRequest brokenRequest = new RatedRequest("broken_query", createRelevant("1"), brokenQuery); specifications.add(brokenRequest); RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); - 
RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { TEST_INDEX })); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder( + client(), + RankEvalAction.INSTANCE, + new RankEvalRequest(task, new String[] { TEST_INDEX }) + ); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/DiscountedCumulativeGain.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/DiscountedCumulativeGain.java index 2f70ce52350..35db325b70c 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/DiscountedCumulativeGain.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/DiscountedCumulativeGain.java @@ -130,15 +130,13 @@ public class DiscountedCumulativeGain implements EvaluationMetric { return this.unknownDocRating; } - @Override public OptionalInt forcedSearchSize() { return OptionalInt.of(k); } @Override - public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, - List ratedDocs) { + public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { List ratedHits = joinHitsWithRatings(hits, ratedDocs); List ratingsInSearchHits = new ArrayList<>(ratedHits.size()); int unratedResults = 0; @@ -159,8 +157,7 @@ public class DiscountedCumulativeGain implements EvaluationMetric { double idcg = 0; if (normalize) { - List allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed() - .collect(Collectors.toList()); + List allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed().collect(Collectors.toList()); Collections.sort(allRatings, Comparator.nullsLast(Collections.reverseOrder())); idcg = computeDCG(allRatings.subList(0, Math.min(ratingsInSearchHits.size(), allRatings.size()))); if (idcg != 0) { @@ -190,13 +187,19 @@ public class DiscountedCumulativeGain implements EvaluationMetric { private static final ParseField K_FIELD = new ParseField("k"); private static final ParseField NORMALIZE_FIELD = new ParseField("normalize"); private static final ParseField UNKNOWN_DOC_RATING_FIELD = new ParseField("unknown_doc_rating"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("dcg", false, - args -> { - Boolean normalized = (Boolean) args[0]; - Integer optK = (Integer) args[2]; - return new DiscountedCumulativeGain(normalized == null ? false : normalized, (Integer) args[1], - optK == null ? DEFAULT_K : optK); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "dcg", + false, + args -> { + Boolean normalized = (Boolean) args[0]; + Integer optK = (Integer) args[2]; + return new DiscountedCumulativeGain( + normalized == null ? false : normalized, + (Integer) args[1], + optK == null ? 
DEFAULT_K : optK + ); + } + ); static { PARSER.declareBoolean(optionalConstructorArg(), NORMALIZE_FIELD); @@ -232,8 +235,8 @@ public class DiscountedCumulativeGain implements EvaluationMetric { } DiscountedCumulativeGain other = (DiscountedCumulativeGain) obj; return Objects.equals(normalize, other.normalize) - && Objects.equals(unknownDocRating, other.unknownDocRating) - && Objects.equals(k, other.k); + && Objects.equals(unknownDocRating, other.unknownDocRating) + && Objects.equals(k, other.k); } @Override @@ -264,8 +267,7 @@ public class DiscountedCumulativeGain implements EvaluationMetric { } @Override - public - String getMetricName() { + public String getMetricName() { return NAME; } @@ -280,9 +282,11 @@ public class DiscountedCumulativeGain implements EvaluationMetric { return builder; } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { - return new Detail((Double) args[0], (Double) args[1] != null ? (Double) args[1] : 0.0d, (Integer) args[2]); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> { return new Detail((Double) args[0], (Double) args[1] != null ? (Double) args[1] : 0.0d, (Integer) args[2]); } + ); static { PARSER.declareDouble(constructorArg(), DCG_FIELD); @@ -343,9 +347,9 @@ public class DiscountedCumulativeGain implements EvaluationMetric { return false; } DiscountedCumulativeGain.Detail other = (DiscountedCumulativeGain.Detail) obj; - return Double.compare(this.dcg, other.dcg) == 0 && - Double.compare(this.idcg, other.idcg) == 0 && - this.unratedDocs == other.unratedDocs; + return Double.compare(this.dcg, other.dcg) == 0 + && Double.compare(this.idcg, other.idcg) == 0 + && this.unratedDocs == other.unratedDocs; } @Override @@ -354,4 +358,3 @@ public class DiscountedCumulativeGain implements EvaluationMetric { } } } - diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvalQueryQuality.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvalQueryQuality.java index 20570dd4acb..28f0a2f334e 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvalQueryQuality.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvalQueryQuality.java @@ -139,8 +139,11 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs"); private static final ParseField HITS_FIELD = new ParseField("hits"); private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details"); - private static final ObjectParser PARSER = new ObjectParser<>("eval_query_quality", - true, ParsedEvalQueryQuality::new); + private static final ObjectParser PARSER = new ObjectParser<>( + "eval_query_quality", + true, + ParsedEvalQueryQuality::new + ); private static class ParsedEvalQueryQuality { double evaluationResult; @@ -150,8 +153,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { static { PARSER.declareDouble((obj, value) -> obj.evaluationResult = value, METRIC_SCORE_FIELD); - PARSER.declareObject((obj, value) -> obj.optionalMetricDetails = value, (p, c) -> parseMetricDetail(p), - METRIC_DETAILS_FIELD); + PARSER.declareObject((obj, value) -> obj.optionalMetricDetails = value, (p, c) -> parseMetricDetail(p), METRIC_DETAILS_FIELD); PARSER.declareObjectArray((obj, list) -> obj.ratedHits = list, (p, c) -> RatedSearchHit.parse(p), HITS_FIELD); } @@ -176,10 +178,10 @@ public 
class EvalQueryQuality implements ToXContentFragment, Writeable { return false; } EvalQueryQuality other = (EvalQueryQuality) obj; - return Objects.equals(queryId, other.queryId) && - Objects.equals(metricScore, other.metricScore) && - Objects.equals(ratedHits, other.ratedHits) && - Objects.equals(optionalMetricDetails, other.optionalMetricDetails); + return Objects.equals(queryId, other.queryId) + && Objects.equals(metricScore, other.metricScore) + && Objects.equals(ratedHits, other.ratedHits) + && Objects.equals(optionalMetricDetails, other.optionalMetricDetails); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvaluationMetric.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvaluationMetric.java index 89a3fcef3d7..70a6e24d274 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvaluationMetric.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/EvaluationMetric.java @@ -71,7 +71,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { */ static List joinHitsWithRatings(SearchHit[] hits, List ratedDocs) { Map ratedDocumentMap = ratedDocs.stream() - .collect(Collectors.toMap(RatedDocument::getKey, item -> item)); + .collect(Collectors.toMap(RatedDocument::getKey, item -> item)); List ratedSearchHits = new ArrayList<>(hits.length); for (SearchHit hit : hits) { DocumentKey key = new DocumentKey(hit.getIndex(), hit.getId()); @@ -89,8 +89,10 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { * Filter {@link RatedSearchHit}s that do not have a rating. */ static List filterUnratedDocuments(List ratedHits) { - return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false) - .map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList()); + return ratedHits.stream() + .filter(hit -> hit.getRating().isPresent() == false) + .map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())) + .collect(Collectors.toList()); } /** diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/ExpectedReciprocalRank.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/ExpectedReciprocalRank.java index c6d8188edba..3557e0576bb 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/ExpectedReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/ExpectedReciprocalRank.java @@ -137,7 +137,6 @@ public class ExpectedReciprocalRank implements EvaluationMetric { return this.unknownDocRating; } - @Override public OptionalInt forcedSearchSize() { return OptionalInt.of(k); @@ -189,14 +188,15 @@ public class ExpectedReciprocalRank implements EvaluationMetric { private static final ParseField K_FIELD = new ParseField("k"); private static final ParseField UNKNOWN_DOC_RATING_FIELD = new ParseField("unknown_doc_rating"); private static final ParseField MAX_RELEVANCE_FIELD = new ParseField("maximum_relevance"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("dcg", false, - args -> { - int maxRelevance = (Integer) args[0]; - Integer optK = (Integer) args[2]; - return new ExpectedReciprocalRank(maxRelevance, (Integer) args[1], - optK == null ? 
DEFAULT_K : optK); - }); - + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "dcg", + false, + args -> { + int maxRelevance = (Integer) args[0]; + Integer optK = (Integer) args[2]; + return new ExpectedReciprocalRank(maxRelevance, (Integer) args[1], optK == null ? DEFAULT_K : optK); + } + ); static { PARSER.declareInt(constructorArg(), MAX_RELEVANCE_FIELD); @@ -231,9 +231,7 @@ public class ExpectedReciprocalRank implements EvaluationMetric { return false; } ExpectedReciprocalRank other = (ExpectedReciprocalRank) obj; - return this.k == other.k && - this.maxRelevance == other.maxRelevance - && Objects.equals(unknownDocRating, other.unknownDocRating); + return this.k == other.k && this.maxRelevance == other.maxRelevance && Objects.equals(unknownDocRating, other.unknownDocRating); } @Override @@ -255,8 +253,7 @@ public class ExpectedReciprocalRank implements EvaluationMetric { } @Override - public - String getMetricName() { + public String getMetricName() { return NAME; } @@ -265,9 +262,11 @@ public class ExpectedReciprocalRank implements EvaluationMetric { return builder.field(UNRATED_FIELD.getPreferredName(), this.unratedDocs); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { - return new Detail((Integer) args[0]); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> { return new Detail((Integer) args[0]); } + ); static { PARSER.declareInt(constructorArg(), UNRATED_FIELD); @@ -312,4 +311,3 @@ public class ExpectedReciprocalRank implements EvaluationMetric { } } } - diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/MeanReciprocalRank.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/MeanReciprocalRank.java index e34b3139a20..59792537e89 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/MeanReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/MeanReciprocalRank.java @@ -147,13 +147,17 @@ public class MeanReciprocalRank implements EvaluationMetric { private static final ParseField RELEVANT_RATING_FIELD = new ParseField("relevant_rating_threshold"); private static final ParseField K_FIELD = new ParseField("k"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("reciprocal_rank", - args -> { - Integer optionalThreshold = (Integer) args[0]; - Integer optionalK = (Integer) args[1]; - return new MeanReciprocalRank(optionalThreshold == null ? DEFAULT_RATING_THRESHOLD : optionalThreshold, - optionalK == null ? DEFAULT_K : optionalK); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "reciprocal_rank", + args -> { + Integer optionalThreshold = (Integer) args[0]; + Integer optionalK = (Integer) args[1]; + return new MeanReciprocalRank( + optionalThreshold == null ? DEFAULT_RATING_THRESHOLD : optionalThreshold, + optionalK == null ? 
DEFAULT_K : optionalK + ); + } + ); static { PARSER.declareInt(optionalConstructorArg(), RELEVANT_RATING_FIELD); @@ -184,8 +188,7 @@ public class MeanReciprocalRank implements EvaluationMetric { return false; } MeanReciprocalRank other = (MeanReciprocalRank) obj; - return Objects.equals(relevantRatingThreshhold, other.relevantRatingThreshhold) - && Objects.equals(k, other.k); + return Objects.equals(relevantRatingThreshhold, other.relevantRatingThreshhold) && Objects.equals(k, other.k); } @Override @@ -207,20 +210,20 @@ public class MeanReciprocalRank implements EvaluationMetric { } @Override - public - String getMetricName() { + public String getMetricName() { return NAME; } @Override - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) - throws IOException { + public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { return builder.field(FIRST_RELEVANT_RANK_FIELD.getPreferredName(), firstRelevantRank); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { - return new Detail((Integer) args[0]); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> { return new Detail((Integer) args[0]); } + ); static { PARSER.declareInt(constructorArg(), FIRST_RELEVANT_RANK_FIELD); diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/PrecisionAtK.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/PrecisionAtK.java index f369056cb5a..a5a8dcab349 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/PrecisionAtK.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/PrecisionAtK.java @@ -120,7 +120,8 @@ public class PrecisionAtK implements EvaluationMetric { return new PrecisionAtK( relevantRatingThreshold == null ? DEFAULT_RELEVANT_RATING_THRESHOLD : relevantRatingThreshold, ignoreUnlabeled == null ? DEFAULT_IGNORE_UNLABELED : ignoreUnlabeled, - k == null ? DEFAULT_K : k); + k == null ? DEFAULT_K : k + ); }); static { @@ -201,8 +202,7 @@ public class PrecisionAtK implements EvaluationMetric { * @return precision at k for above {@link SearchResult} list. 
**/ @Override - public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, - List ratedDocs) { + public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { List ratedSearchHits = joinHitsWithRatings(hits, ratedDocs); @@ -268,8 +268,11 @@ public class PrecisionAtK implements EvaluationMetric { this(in.readVInt(), in.readVInt()); } - private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, true, args -> new Detail((Integer) args[0], (Integer) args[1])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new Detail((Integer) args[0], (Integer) args[1]) + ); static { PARSER.declareInt(constructorArg(), RELEVANT_DOCS_RETRIEVED_FIELD); @@ -287,8 +290,7 @@ public class PrecisionAtK implements EvaluationMetric { } @Override - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) - throws IOException { + public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { builder.field(RELEVANT_DOCS_RETRIEVED_FIELD.getPreferredName(), relevantRetrieved); builder.field(DOCS_RETRIEVED_FIELD.getPreferredName(), retrieved); return builder; @@ -316,8 +318,7 @@ public class PrecisionAtK implements EvaluationMetric { return false; } PrecisionAtK.Detail other = (PrecisionAtK.Detail) obj; - return Objects.equals(relevantRetrieved, other.relevantRetrieved) - && Objects.equals(retrieved, other.retrieved); + return Objects.equals(relevantRetrieved, other.relevantRetrieved) && Objects.equals(retrieved, other.retrieved); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalNamedXContentProvider.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalNamedXContentProvider.java index 7b22df56bad..da59a738589 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalNamedXContentProvider.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalNamedXContentProvider.java @@ -44,27 +44,59 @@ public class RankEvalNamedXContentProvider implements NamedXContentProvider { @Override public List getNamedXContentParsers() { List namedXContent = new ArrayList<>(); - namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(PrecisionAtK.NAME), - PrecisionAtK::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(RecallAtK.NAME), - RecallAtK::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(MeanReciprocalRank.NAME), - MeanReciprocalRank::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(DiscountedCumulativeGain.NAME), - DiscountedCumulativeGain::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(ExpectedReciprocalRank.NAME), - ExpectedReciprocalRank::fromXContent)); + namedXContent.add( + new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(PrecisionAtK.NAME), PrecisionAtK::fromXContent) + ); + namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(RecallAtK.NAME), RecallAtK::fromXContent)); + namedXContent.add( + new NamedXContentRegistry.Entry( + EvaluationMetric.class, + new ParseField(MeanReciprocalRank.NAME), + MeanReciprocalRank::fromXContent + ) + ); + namedXContent.add( + new NamedXContentRegistry.Entry( + 
EvaluationMetric.class, + new ParseField(DiscountedCumulativeGain.NAME), + DiscountedCumulativeGain::fromXContent + ) + ); + namedXContent.add( + new NamedXContentRegistry.Entry( + EvaluationMetric.class, + new ParseField(ExpectedReciprocalRank.NAME), + ExpectedReciprocalRank::fromXContent + ) + ); - namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(PrecisionAtK.NAME), - PrecisionAtK.Detail::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(RecallAtK.NAME), - RecallAtK.Detail::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(MeanReciprocalRank.NAME), - MeanReciprocalRank.Detail::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(DiscountedCumulativeGain.NAME), - DiscountedCumulativeGain.Detail::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(ExpectedReciprocalRank.NAME), - ExpectedReciprocalRank.Detail::fromXContent)); + namedXContent.add( + new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(PrecisionAtK.NAME), PrecisionAtK.Detail::fromXContent) + ); + namedXContent.add( + new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(RecallAtK.NAME), RecallAtK.Detail::fromXContent) + ); + namedXContent.add( + new NamedXContentRegistry.Entry( + MetricDetail.class, + new ParseField(MeanReciprocalRank.NAME), + MeanReciprocalRank.Detail::fromXContent + ) + ); + namedXContent.add( + new NamedXContentRegistry.Entry( + MetricDetail.class, + new ParseField(DiscountedCumulativeGain.NAME), + DiscountedCumulativeGain.Detail::fromXContent + ) + ); + namedXContent.add( + new NamedXContentRegistry.Entry( + MetricDetail.class, + new ParseField(ExpectedReciprocalRank.NAME), + ExpectedReciprocalRank.Detail::fromXContent + ) + ); return namedXContent; } } diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalPlugin.java index d9646f17500..a1eaa0f62f0 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalPlugin.java @@ -61,9 +61,15 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin { } @Override - public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { return Collections.singletonList(new RestRankEvalAction()); } @@ -74,16 +80,20 @@ public class RankEvalPlugin extends Plugin implements ActionPlugin { namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, RecallAtK.NAME, RecallAtK::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, MeanReciprocalRank.NAME, MeanReciprocalRank::new)); namedWriteables.add( - new NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new)); + new 
NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new) + ); namedWriteables.add( - new NamedWriteableRegistry.Entry(EvaluationMetric.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank::new)); + new NamedWriteableRegistry.Entry(EvaluationMetric.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank::new) + ); namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, PrecisionAtK.NAME, PrecisionAtK.Detail::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, RecallAtK.NAME, RecallAtK.Detail::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, MeanReciprocalRank.NAME, MeanReciprocalRank.Detail::new)); namedWriteables.add( - new NamedWriteableRegistry.Entry(MetricDetail.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain.Detail::new)); + new NamedWriteableRegistry.Entry(MetricDetail.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain.Detail::new) + ); namedWriteables.add( - new NamedWriteableRegistry.Entry(MetricDetail.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank.Detail::new)); + new NamedWriteableRegistry.Entry(MetricDetail.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank.Detail::new) + ); return namedWriteables; } diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java index f45274f6116..66db397865a 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java @@ -54,7 +54,7 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep private RankEvalSpec rankingEvaluationSpec; - private IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; + private IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; private String[] indices = Strings.EMPTY_ARRAY; private SearchType searchType = SearchType.DEFAULT; @@ -74,8 +74,7 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep } } - RankEvalRequest() { - } + RankEvalRequest() {} @Override public ActionRequestValidationException validate() { @@ -165,10 +164,10 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep return false; } RankEvalRequest that = (RankEvalRequest) o; - return Objects.equals(indicesOptions, that.indicesOptions) && - Arrays.equals(indices, that.indices) && - Objects.equals(rankingEvaluationSpec, that.rankingEvaluationSpec) && - Objects.equals(searchType, that.searchType); + return Objects.equals(indicesOptions, that.indicesOptions) + && Arrays.equals(indices, that.indices) + && Objects.equals(rankingEvaluationSpec, that.rankingEvaluationSpec) + && Objects.equals(searchType, that.searchType); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequestBuilder.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequestBuilder.java index 5600a2ae1c1..84fc45527ec 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequestBuilder.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequestBuilder.java @@ -38,8 +38,7 @@ import org.opensearch.client.OpenSearchClient; public class RankEvalRequestBuilder extends ActionRequestBuilder { - public 
RankEvalRequestBuilder(OpenSearchClient client, ActionType action, - RankEvalRequest request) { + public RankEvalRequestBuilder(OpenSearchClient client, ActionType action, RankEvalRequest request) { super(client, action, request); } diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java index 378856cdddc..23b42846458 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java @@ -67,10 +67,9 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject /** exceptions for specific ranking evaluation queries, keyed by their id */ private Map failures; - public RankEvalResponse(double metricScore, Map partialResults, - Map failures) { + public RankEvalResponse(double metricScore, Map partialResults, Map failures) { this.metricScore = metricScore; - this.details = new HashMap<>(partialResults); + this.details = new HashMap<>(partialResults); this.failures = new HashMap<>(failures); } @@ -147,15 +146,22 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject private static final ParseField DETAILS_FIELD = new ParseField("details"); private static final ParseField FAILURES_FIELD = new ParseField("failures"); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rank_eval_response", - true, - a -> new RankEvalResponse((Double) a[0], - ((List) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())), - ((List>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)))); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "rank_eval_response", + true, + a -> new RankEvalResponse( + (Double) a[0], + ((List) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())), + ((List>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) + ) + ); static { PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD); - PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> EvalQueryQuality.fromXContent(p, n), - DETAILS_FIELD); + PARSER.declareNamedObjects( + ConstructingObjectParser.optionalConstructorArg(), + (p, c, n) -> EvalQueryQuality.fromXContent(p, n), + DETAILS_FIELD + ); PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.nextToken(), p); XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java index 4d04a4c8f63..8708c94c353 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java @@ -76,14 +76,18 @@ public class RankEvalSpec implements Writeable, ToXContentObject { this.metric = Objects.requireNonNull(metric, "Cannot evaluate ranking if no evaluation metric is provided."); if (ratedRequests == null || ratedRequests.isEmpty()) { throw new IllegalArgumentException( - "Cannot evaluate ranking if no search 
requests with rated results are provided. Seen: " + ratedRequests); + "Cannot evaluate ranking if no search requests with rated results are provided. Seen: " + ratedRequests + ); } this.ratedRequests = ratedRequests; if (templates == null || templates.isEmpty()) { for (RatedRequest request : ratedRequests) { if (request.getEvaluationRequest() == null) { - throw new IllegalStateException("Cannot evaluate ranking if neither template nor evaluation request is " - + "provided. Seen for request id: " + request.getId()); + throw new IllegalStateException( + "Cannot evaluate ranking if neither template nor evaluation request is " + + "provided. Seen for request id: " + + request.getId() + ); } } } @@ -159,14 +163,19 @@ public class RankEvalSpec implements Writeable, ToXContentObject { private static final ParseField REQUESTS_FIELD = new ParseField("requests"); private static final ParseField MAX_CONCURRENT_SEARCHES_FIELD = new ParseField("max_concurrent_searches"); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rank_eval", - a -> new RankEvalSpec((List) a[0], (EvaluationMetric) a[1], (Collection) a[2])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "rank_eval", + a -> new RankEvalSpec((List) a[0], (EvaluationMetric) a[1], (Collection) a[2]) + ); static { PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> RatedRequest.fromXContent(p), REQUESTS_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> parseMetric(p), METRIC_FIELD); - PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> ScriptWithId.fromXContent(p), - TEMPLATES_FIELD); + PARSER.declareObjectArray( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> ScriptWithId.fromXContent(p), + TEMPLATES_FIELD + ); PARSER.declareInt(RankEvalSpec::setMaxConcurrentSearches, MAX_CONCURRENT_SEARCHES_FIELD); } @@ -194,9 +203,10 @@ public class RankEvalSpec implements Writeable, ToXContentObject { this.script = script; } - private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>("script_with_id", - a -> new ScriptWithId((String) a[0], (Script) a[1])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "script_with_id", + a -> new ScriptWithId((String) a[0], (Script) a[1]) + ); public static ScriptWithId fromXContent(XContentParser parser) { return PARSER.apply(parser, null); @@ -252,10 +262,10 @@ public class RankEvalSpec implements Writeable, ToXContentObject { } RankEvalSpec other = (RankEvalSpec) obj; - return Objects.equals(ratedRequests, other.ratedRequests) && - Objects.equals(metric, other.metric) && - Objects.equals(maxConcurrentSearches, other.maxConcurrentSearches) && - Objects.equals(templates, other.templates); + return Objects.equals(ratedRequests, other.ratedRequests) + && Objects.equals(metric, other.metric) + && Objects.equals(maxConcurrentSearches, other.maxConcurrentSearches) + && Objects.equals(templates, other.templates); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java index 171895c699a..5f9a71cd25e 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java @@ -65,8 +65,10 @@ public class 
RatedDocument implements Writeable, ToXContentObject { static final ParseField DOC_ID_FIELD = new ParseField("_id"); static final ParseField INDEX_FIELD = new ParseField("_index"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rated_document", - a -> new RatedDocument((String) a[0], (String) a[1], (Integer) a[2])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "rated_document", + a -> new RatedDocument((String) a[0], (String) a[1], (Integer) a[2]) + ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), INDEX_FIELD); diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java index 22ee2f9b898..5289a3945db 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java @@ -109,8 +109,7 @@ public class RatedRequest implements Writeable, ToXContentObject { * @param params template parameters * @param templateId a templare id */ - public RatedRequest(String id, List ratedDocs, Map params, - String templateId) { + public RatedRequest(String id, List ratedDocs, Map params, String templateId) { this(id, ratedDocs, null, params, templateId); } @@ -126,15 +125,22 @@ public class RatedRequest implements Writeable, ToXContentObject { this(id, ratedDocs, evaluatedQuery, new HashMap<>(), null); } - private RatedRequest(String id, List ratedDocs, SearchSourceBuilder evaluatedQuery, - Map params, String templateId) { + private RatedRequest( + String id, + List ratedDocs, + SearchSourceBuilder evaluatedQuery, + Map params, + String templateId + ) { if (params != null && (params.size() > 0 && evaluatedQuery != null)) { throw new IllegalArgumentException( - "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters."); + "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters." + ); } if (templateId != null && evaluatedQuery != null) { throw new IllegalArgumentException( - "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters."); + "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters." 
+ ); } if ((params == null || params.size() < 1) && evaluatedQuery == null) { throw new IllegalArgumentException("Need to set at least test request or test request template parameters."); @@ -150,7 +156,8 @@ public class RatedRequest implements Writeable, ToXContentObject { if (docKeys.add(doc.getKey()) == false) { String docKeyToString = doc.getKey().toString().replaceAll("\n", "").replaceAll(" ", " "); throw new IllegalArgumentException( - "Found duplicate rated document key [" + docKeyToString + "] in evaluation request [" + id + "]"); + "Found duplicate rated document key [" + docKeyToString + "] in evaluation request [" + id + "]" + ); } } @@ -263,17 +270,29 @@ public class RatedRequest implements Writeable, ToXContentObject { private static final ParseField TEMPLATE_ID_FIELD = new ParseField("template_id"); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("request", - a -> new RatedRequest((String) a[0], (List) a[1], (SearchSourceBuilder) a[2], (Map) a[3], - (String) a[4])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "request", + a -> new RatedRequest( + (String) a[0], + (List) a[1], + (SearchSourceBuilder) a[2], + (Map) a[3], + (String) a[4] + ) + ); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), ID_FIELD); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> { - return RatedDocument.fromXContent(p); - }, RATINGS_FIELD); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> - SearchSourceBuilder.fromXContent(p, false), REQUEST_FIELD); + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + (p, c) -> { return RatedDocument.fromXContent(p); }, + RATINGS_FIELD + ); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> SearchSourceBuilder.fromXContent(p, false), + REQUEST_FIELD + ); PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), PARAMS_FIELD); PARSER.declareStringArray(RatedRequest::addSummaryFields, FIELDS_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TEMPLATE_ID_FIELD); @@ -335,16 +354,16 @@ public class RatedRequest implements Writeable, ToXContentObject { RatedRequest other = (RatedRequest) obj; - return Objects.equals(id, other.id) && Objects.equals(evaluationRequest, other.evaluationRequest) - && Objects.equals(summaryFields, other.summaryFields) - && Objects.equals(ratedDocs, other.ratedDocs) - && Objects.equals(params, other.params) - && Objects.equals(templateId, other.templateId); + return Objects.equals(id, other.id) + && Objects.equals(evaluationRequest, other.evaluationRequest) + && Objects.equals(summaryFields, other.summaryFields) + && Objects.equals(ratedDocs, other.ratedDocs) + && Objects.equals(params, other.params) + && Objects.equals(templateId, other.templateId); } @Override public final int hashCode() { - return Objects.hash(id, evaluationRequest, summaryFields, ratedDocs, params, - templateId); + return Objects.hash(id, evaluationRequest, summaryFields, ratedDocs, params, templateId); } } diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedSearchHit.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedSearchHit.java index 9761b6684a4..01c1c81f2bf 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedSearchHit.java +++ 
b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedSearchHit.java @@ -83,8 +83,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject { } @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) - throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field("hit", (ToXContent) searchHit); builder.field("rating", rating.isPresent() ? rating.getAsInt() : null); @@ -94,14 +93,20 @@ public class RatedSearchHit implements Writeable, ToXContentObject { private static final ParseField HIT_FIELD = new ParseField("hit"); private static final ParseField RATING_FIELD = new ParseField("rating"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rated_hit", true, - a -> new RatedSearchHit((SearchHit) a[0], (OptionalInt) a[1])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "rated_hit", + true, + a -> new RatedSearchHit((SearchHit) a[0], (OptionalInt) a[1]) + ); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> SearchHit.fromXContent(p), HIT_FIELD); - PARSER.declareField(ConstructingObjectParser.constructorArg(), - (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? OptionalInt.empty() : OptionalInt.of(p.intValue()), - RATING_FIELD, ValueType.INT_OR_NULL); + PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? OptionalInt.empty() : OptionalInt.of(p.intValue()), + RATING_FIELD, + ValueType.INT_OR_NULL + ); } public static RatedSearchHit parse(XContentParser parser) throws IOException { @@ -117,8 +122,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject { return false; } RatedSearchHit other = (RatedSearchHit) obj; - return Objects.equals(rating, other.rating) - && Objects.equals(searchHit, other.searchHit); + return Objects.equals(rating, other.rating) && Objects.equals(searchHit, other.searchHit); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RecallAtK.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RecallAtK.java index 39acf667f9c..4edd23133d2 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RecallAtK.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RecallAtK.java @@ -103,7 +103,8 @@ public class RecallAtK implements EvaluationMetric { Integer k = (Integer) args[1]; return new RecallAtK( relevantRatingThreshold == null ? DEFAULT_RELEVANT_RATING_THRESHOLD : relevantRatingThreshold, - k == null ? DEFAULT_K : k); + k == null ? DEFAULT_K : k + ); }); static { @@ -167,8 +168,7 @@ public class RecallAtK implements EvaluationMetric { * @return recall at k for above {@link SearchResult} list. 
**/ @Override - public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, - List ratedDocs) { + public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { List ratedSearchHits = joinHitsWithRatings(hits, ratedDocs); @@ -182,7 +182,7 @@ public class RecallAtK implements EvaluationMetric { int relevant = 0; for (RatedDocument rd : ratedDocs) { - if(isRelevant(rd.getRating())) { + if (isRelevant(rd.getRating())) { relevant++; } } @@ -207,8 +207,7 @@ public class RecallAtK implements EvaluationMetric { return false; } RecallAtK other = (RecallAtK) obj; - return Objects.equals(relevantRatingThreshold, other.relevantRatingThreshold) - && Objects.equals(k, other.k); + return Objects.equals(relevantRatingThreshold, other.relevantRatingThreshold) && Objects.equals(k, other.k); } @Override @@ -233,8 +232,11 @@ public class RecallAtK implements EvaluationMetric { this.relevant = in.readVLong(); } - private static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(NAME, true, args -> new Detail((Integer) args[0], (Integer) args[1])); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new Detail((Integer) args[0], (Integer) args[1]) + ); static { PARSER.declareInt(constructorArg(), RELEVANT_DOCS_RETRIEVED_FIELD); @@ -252,8 +254,7 @@ public class RecallAtK implements EvaluationMetric { } @Override - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) - throws IOException { + public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { builder.field(RELEVANT_DOCS_RETRIEVED_FIELD.getPreferredName(), relevantRetrieved); builder.field(RELEVANT_DOCS_FIELD.getPreferredName(), relevant); return builder; @@ -281,8 +282,7 @@ public class RecallAtK implements EvaluationMetric { return false; } RecallAtK.Detail other = (RecallAtK.Detail) obj; - return Objects.equals(relevantRetrieved, other.relevantRetrieved) - && Objects.equals(relevant, other.relevant); + return Objects.equals(relevantRetrieved, other.relevantRetrieved) && Objects.equals(relevant, other.relevant); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RestRankEvalAction.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RestRankEvalAction.java index 6bb2b942d49..3c8748d13da 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RestRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RestRankEvalAction.java @@ -107,11 +107,14 @@ public class RestRankEvalAction extends BaseRestHandler { @Override public List routes() { - return unmodifiableList(asList( - new Route(GET, "/" + ENDPOINT), - new Route(POST, "/" + ENDPOINT), - new Route(GET, "/{index}/" + ENDPOINT), - new Route(POST, "/{index}/" + ENDPOINT))); + return unmodifiableList( + asList( + new Route(GET, "/" + ENDPOINT), + new Route(POST, "/" + ENDPOINT), + new Route(GET, "/{index}/" + ENDPOINT), + new Route(POST, "/{index}/" + ENDPOINT) + ) + ); } @Override @@ -120,8 +123,11 @@ public class RestRankEvalAction extends BaseRestHandler { try (XContentParser parser = request.contentOrSourceParamParser()) { parseRankEvalRequest(rankEvalRequest, request, parser); } - return channel -> client.executeLocally(RankEvalAction.INSTANCE, rankEvalRequest, - new RestToXContentListener(channel)); + return channel -> client.executeLocally( + RankEvalAction.INSTANCE, + rankEvalRequest, + new RestToXContentListener(channel) + 
); } private static void parseRankEvalRequest(RankEvalRequest rankEvalRequest, RestRequest request, XContentParser parser) { diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java index b616fc1af02..8cfde2d2b41 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java @@ -85,10 +85,14 @@ public class TransportRankEvalAction extends HandledTransportAction) RankEvalRequest::new); + public TransportRankEvalAction( + ActionFilters actionFilters, + Client client, + TransportService transportService, + ScriptService scriptService, + NamedXContentRegistry namedXContentRegistry + ) { + super(RankEvalAction.NAME, transportService, actionFilters, (Writeable.Reader) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; this.client = client; @@ -117,8 +121,14 @@ public class TransportRankEvalAction extends HandledTransportAction { @@ -158,8 +175,12 @@ public class TransportRankEvalAction extends HandledTransportAction errors; private final EvaluationMetric metric; - RankEvalActionListener(ActionListener listener, EvaluationMetric metric, RatedRequest[] specifications, - Map errors) { + RankEvalActionListener( + ActionListener listener, + EvaluationMetric metric, + RatedRequest[] specifications, + Map errors + ) { this.listener = listener; this.metric = metric; this.errors = errors; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java index 510c27b8310..7c385cd45a8 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -87,8 +87,13 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { SearchHit[] hits = new SearchHit[6]; for (int i = 0; i < 6; i++) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit( + i, + Integer.toString(i), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -138,8 +143,13 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } } - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit( + i, + Integer.toString(i), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -196,8 +206,13 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase 
{ // only create four hits SearchHit[] hits = new SearchHit[4]; for (int i = 0; i < 4; i++) { - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit( + i, + Integer.toString(i), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -262,7 +277,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { } private void assertParsedCorrect(String xContent, Integer expectedUnknownDocRating, boolean expectedNormalize, int expectedK) - throws IOException { + throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) { DiscountedCumulativeGain dcgAt = DiscountedCumulativeGain.fromXContent(parser); assertEquals(expectedUnknownDocRating, dcgAt.getUnknownDocRating()); @@ -299,8 +314,10 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - XContentParseException exception = expectThrows(XContentParseException.class, - () -> DiscountedCumulativeGain.fromXContent(parser)); + XContentParseException exception = expectThrows( + XContentParseException.class, + () -> DiscountedCumulativeGain.fromXContent(parser) + ); assertThat(exception.getMessage(), containsString("[dcg] unknown field")); } } @@ -316,8 +333,18 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { assertEquals(expectedNdcg, detail.getNDCG(), 0.0); assertEquals(unratedDocs, detail.getUnratedDocs()); if (idcg != 0) { - assertEquals("{\"dcg\":{\"dcg\":" + dcg + ",\"ideal_dcg\":" + idcg + ",\"normalized_dcg\":" + expectedNdcg - + ",\"unrated_docs\":" + unratedDocs + "}}", Strings.toString(detail)); + assertEquals( + "{\"dcg\":{\"dcg\":" + + dcg + + ",\"ideal_dcg\":" + + idcg + + ",\"normalized_dcg\":" + + expectedNdcg + + ",\"unrated_docs\":" + + unratedDocs + + "}}", + Strings.toString(detail) + ); } else { assertEquals("{\"dcg\":{\"dcg\":" + dcg + ",\"unrated_docs\":" + unratedDocs + "}}", Strings.toString(detail)); } @@ -325,32 +352,42 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase { public void testSerialization() throws IOException { DiscountedCumulativeGain original = createTestItem(); - DiscountedCumulativeGain deserialized = - OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - DiscountedCumulativeGain::new); + DiscountedCumulativeGain deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + DiscountedCumulativeGain::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } public void testEqualsAndHash() throws IOException { - checkEqualsAndHashCode(createTestItem(), original -> { - return new DiscountedCumulativeGain(original.getNormalize(), original.getUnknownDocRating(), original.getK()); - }, DiscountedCumulativeGainTests::mutateTestItem); + checkEqualsAndHashCode( + createTestItem(), + original -> { return new DiscountedCumulativeGain(original.getNormalize(), original.getUnknownDocRating(), original.getK()); }, + 
DiscountedCumulativeGainTests::mutateTestItem + ); } private static DiscountedCumulativeGain mutateTestItem(DiscountedCumulativeGain original) { switch (randomIntBetween(0, 2)) { - case 0: - return new DiscountedCumulativeGain(!original.getNormalize(), original.getUnknownDocRating(), original.getK()); - case 1: - return new DiscountedCumulativeGain(original.getNormalize(), - randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)), original.getK()); - case 2: - return new DiscountedCumulativeGain(original.getNormalize(), original.getUnknownDocRating(), - randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))); - default: - throw new IllegalArgumentException("mutation variant not allowed"); + case 0: + return new DiscountedCumulativeGain(!original.getNormalize(), original.getUnknownDocRating(), original.getK()); + case 1: + return new DiscountedCumulativeGain( + original.getNormalize(), + randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)), + original.getK() + ); + case 2: + return new DiscountedCumulativeGain( + original.getNormalize(), + original.getUnknownDocRating(), + randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10)) + ); + default: + throw new IllegalArgumentException("mutation variant not allowed"); } } } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/EvalQueryQualityTests.java index e194b780feb..56d23d8ca31 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/EvalQueryQualityTests.java @@ -73,23 +73,27 @@ public class EvalQueryQualityTests extends OpenSearchTestCase { ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null, OriginalIndices.NONE)); ratedHits.add(ratedSearchHit); } - EvalQueryQuality evalQueryQuality = new EvalQueryQuality(randomAlphaOfLength(10), - randomDoubleBetween(0.0, 1.0, true)); + EvalQueryQuality evalQueryQuality = new EvalQueryQuality(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, true)); if (randomBoolean()) { int metricDetail = randomIntBetween(0, 2); switch (metricDetail) { - case 0: - evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000))); - break; - case 1: - evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000))); - break; - case 2: - evalQueryQuality.setMetricDetails(new DiscountedCumulativeGain.Detail(randomDoubleBetween(0, 1, true), - randomBoolean() ? randomDoubleBetween(0, 1, true) : 0, randomInt())); - break; - default: - throw new IllegalArgumentException("illegal randomized value in test"); + case 0: + evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000))); + break; + case 1: + evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000))); + break; + case 2: + evalQueryQuality.setMetricDetails( + new DiscountedCumulativeGain.Detail( + randomDoubleBetween(0, 1, true), + randomBoolean() ? 
randomDoubleBetween(0, 1, true) : 0, + randomInt() + ) + ); + break; + default: + throw new IllegalArgumentException("illegal randomized value in test"); } } evalQueryQuality.addHitsAndRatings(ratedHits); @@ -147,24 +151,24 @@ public class EvalQueryQualityTests extends OpenSearchTestCase { List ratedHits = new ArrayList<>(original.getHitsAndRatings()); MetricDetail metricDetails = original.getMetricDetails(); switch (randomIntBetween(0, 3)) { - case 0: - id = id + "_"; - break; - case 1: - metricScore = metricScore + 0.1; - break; - case 2: - if (metricDetails == null) { - metricDetails = new PrecisionAtK.Detail(1, 5); - } else { - metricDetails = null; - } - break; - case 3: - ratedHits.add(RatedSearchHitTests.randomRatedSearchHit()); - break; - default: - throw new IllegalStateException("The test should only allow four parameters mutated"); + case 0: + id = id + "_"; + break; + case 1: + metricScore = metricScore + 0.1; + break; + case 2: + if (metricDetails == null) { + metricDetails = new PrecisionAtK.Detail(1, 5); + } else { + metricDetails = null; + } + break; + case 3: + ratedHits.add(RatedSearchHitTests.randomRatedSearchHit()); + break; + default: + throw new IllegalStateException("The test should only allow four parameters mutated"); } EvalQueryQuality evalQueryQuality = new EvalQueryQuality(id, metricScore); evalQueryQuality.setMetricDetails(metricDetails); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java index c6812dae4c6..4fb0089a32c 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -65,11 +65,11 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { public void testProbabilityOfRelevance() { ExpectedReciprocalRank err = new ExpectedReciprocalRank(5); assertEquals(0.0, err.probabilityOfRelevance(0), 0.0); - assertEquals(1d/32d, err.probabilityOfRelevance(1), 0.0); - assertEquals(3d/32d, err.probabilityOfRelevance(2), 0.0); - assertEquals(7d/32d, err.probabilityOfRelevance(3), 0.0); - assertEquals(15d/32d, err.probabilityOfRelevance(4), 0.0); - assertEquals(31d/32d, err.probabilityOfRelevance(5), 0.0); + assertEquals(1d / 32d, err.probabilityOfRelevance(1), 0.0); + assertEquals(3d / 32d, err.probabilityOfRelevance(2), 0.0); + assertEquals(7d / 32d, err.probabilityOfRelevance(3), 0.0); + assertEquals(15d / 32d, err.probabilityOfRelevance(4), 0.0); + assertEquals(31d / 32d, err.probabilityOfRelevance(5), 0.0); } /** @@ -88,7 +88,7 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { */ public void testERRAt() { List rated = new ArrayList<>(); - Integer[] relevanceRatings = new Integer[] { 3, 2, 0, 1}; + Integer[] relevanceRatings = new Integer[] { 3, 2, 0, 1 }; SearchHit[] hits = createSearchHits(rated, relevanceRatings); ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, 0, 3); assertEquals(0.8984375, err.evaluate("id", hits, rated).metricScore(), DELTA); @@ -113,7 +113,7 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { */ public void testERRMissingRatings() { List rated = new ArrayList<>(); - Integer[] relevanceRatings = new Integer[] { 3, null, 0, 1}; + Integer[] relevanceRatings = new Integer[] { 3, null, 0, 1 }; SearchHit[] hits = createSearchHits(rated, relevanceRatings); ExpectedReciprocalRank 
err = new ExpectedReciprocalRank(3, null, 4); EvalQueryQuality evaluation = err.evaluate("id", hits, rated); @@ -130,8 +130,13 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { if (relevanceRatings[i] != null) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit( + i, + Integer.toString(i), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } return hits; @@ -152,7 +157,7 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { } private void assertParsedCorrect(String xContent, Integer expectedUnknownDocRating, int expectedMaxRelevance, int expectedK) - throws IOException { + throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) { ExpectedReciprocalRank errAt = ExpectedReciprocalRank.fromXContent(parser); assertEquals(expectedUnknownDocRating, errAt.getUnknownDocRating()); @@ -189,8 +194,10 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - XContentParseException exception = expectThrows(XContentParseException.class, - () -> DiscountedCumulativeGain.fromXContent(parser)); + XContentParseException exception = expectThrows( + XContentParseException.class, + () -> DiscountedCumulativeGain.fromXContent(parser) + ); assertThat(exception.getMessage(), containsString("[dcg] unknown field")); } } @@ -203,32 +210,42 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase { public void testSerialization() throws IOException { ExpectedReciprocalRank original = createTestItem(); - ExpectedReciprocalRank deserialized = - OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - ExpectedReciprocalRank::new); + ExpectedReciprocalRank deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + ExpectedReciprocalRank::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } public void testEqualsAndHash() throws IOException { - checkEqualsAndHashCode(createTestItem(), original -> { - return new ExpectedReciprocalRank(original.getMaxRelevance(), original.getUnknownDocRating(), original.getK()); - }, ExpectedReciprocalRankTests::mutateTestItem); + checkEqualsAndHashCode( + createTestItem(), + original -> { return new ExpectedReciprocalRank(original.getMaxRelevance(), original.getUnknownDocRating(), original.getK()); }, + ExpectedReciprocalRankTests::mutateTestItem + ); } private static ExpectedReciprocalRank mutateTestItem(ExpectedReciprocalRank original) { switch (randomIntBetween(0, 2)) { - case 0: - return new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK()); - case 1: - return new ExpectedReciprocalRank(original.getMaxRelevance(), - randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)), original.getK()); - case 2: - return new ExpectedReciprocalRank(original.getMaxRelevance(), original.getUnknownDocRating(), - 
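For reference, the expectations in ExpectedReciprocalRankTests above follow the standard graded-relevance formulation of ERR, which matches the asserted values. A minimal standalone sketch assuming that formulation (plain Java, no OpenSearch types; the class and method names are illustrative, not taken from the patched sources):

public final class ErrSketch {

    // probabilityOfRelevance: (2^grade - 1) / 2^maxGrade, so with maxGrade = 5 the
    // grades 1..5 map to 1/32, 3/32, 7/32, 15/32 and 31/32, as asserted above.
    static double probabilityOfRelevance(int grade, int maxGrade) {
        return (Math.pow(2, grade) - 1) / Math.pow(2, maxGrade);
    }

    // ERR@k = sum over ranks r of (1/r) * p_r * prod_{i < r} (1 - p_i)
    static double errAt(int[] gradesByRank, int maxGrade, int k) {
        double err = 0.0;
        double reachedThisRank = 1.0; // probability the user was not yet satisfied
        for (int rank = 1; rank <= Math.min(k, gradesByRank.length); rank++) {
            double p = probabilityOfRelevance(gradesByRank[rank - 1], maxGrade);
            err += reachedThisRank * p / rank;
            reachedThisRank *= (1 - p);
        }
        return err;
    }

    public static void main(String[] args) {
        // Ratings 3, 2, 0 at the first three ranks with max relevance 3 give
        // 0.8984375, matching the expectation in testERRAt.
        System.out.println(errAt(new int[] { 3, 2, 0, 1 }, 3, 3));
    }
}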
randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))); - default: - throw new IllegalArgumentException("mutation variant not allowed"); + case 0: + return new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK()); + case 1: + return new ExpectedReciprocalRank( + original.getMaxRelevance(), + randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)), + original.getK() + ); + case 2: + return new ExpectedReciprocalRank( + original.getMaxRelevance(), + original.getUnknownDocRating(), + randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10)) + ); + default: + throw new IllegalArgumentException("mutation variant not allowed"); } } } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java index 7dd6d58fdd1..befb9bdf371 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java @@ -145,11 +145,11 @@ public class MeanReciprocalRankTests extends OpenSearchTestCase { */ public void testPrecisionAtFiveRelevanceThreshold() { List rated = new ArrayList<>(); - rated.add(new RatedDocument("test", "0", 0)); - rated.add(new RatedDocument("test", "1", 1)); - rated.add(new RatedDocument("test", "2", 2)); - rated.add(new RatedDocument("test", "3", 3)); - rated.add(new RatedDocument("test", "4", 4)); + rated.add(new RatedDocument("test", "0", 0)); + rated.add(new RatedDocument("test", "1", 1)); + rated.add(new RatedDocument("test", "2", 2)); + rated.add(new RatedDocument("test", "3", 3)); + rated.add(new RatedDocument("test", "4", 4)); SearchHit[] hits = createSearchHits(0, 5, "test"); MeanReciprocalRank reciprocalRank = new MeanReciprocalRank(2, 10); @@ -204,8 +204,7 @@ public class MeanReciprocalRankTests extends OpenSearchTestCase { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - XContentParseException exception = expectThrows(XContentParseException.class, - () -> MeanReciprocalRank.fromXContent(parser)); + XContentParseException exception = expectThrows(XContentParseException.class, () -> MeanReciprocalRank.fromXContent(parser)); assertThat(exception.getMessage(), containsString("[reciprocal_rank] unknown field")); } } @@ -229,8 +228,11 @@ public class MeanReciprocalRankTests extends OpenSearchTestCase { public void testSerialization() throws IOException { MeanReciprocalRank original = createTestItem(); - MeanReciprocalRank deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - MeanReciprocalRank::new); + MeanReciprocalRank deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + MeanReciprocalRank::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java index 55637694d9b..ea9eadb0c9c 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java +++ 
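For reference, new MeanReciprocalRank(2, 10) above configures a relevance threshold of 2, so only hits rated 2 or higher count when locating the first relevant result. A minimal standalone sketch of that behaviour, assuming the usual reciprocal-rank definition (names are illustrative, not taken from the patched sources):

final class ReciprocalRankSketch {
    // Returns 1/rank of the first hit whose rating meets the threshold, or 0 if none does.
    static double reciprocalRank(int[] ratingsByRank, int relevantRatingThreshold) {
        for (int rank = 1; rank <= ratingsByRank.length; rank++) {
            if (ratingsByRank[rank - 1] >= relevantRatingThreshold) {
                return 1.0 / rank;
            }
        }
        return 0.0;
    }

    public static void main(String[] args) {
        // Documents rated 0..4 returned in that order with threshold 2: the first
        // relevant hit sits at rank 3, so the reciprocal rank is 1/3.
        System.out.println(reciprocalRank(new int[] { 0, 1, 2, 3, 4 }, 2));
    }
}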
b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java @@ -146,8 +146,7 @@ public class PrecisionAtKTests extends OpenSearchTestCase { public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[5]; for (int i = 0; i < 5; i++) { - hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); @@ -229,8 +228,11 @@ public class PrecisionAtKTests extends OpenSearchTestCase { public void testSerialization() throws IOException { PrecisionAtK original = createTestItem(); - PrecisionAtK deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - PrecisionAtK::new); + PrecisionAtK deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + PrecisionAtK::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); @@ -241,27 +243,39 @@ public class PrecisionAtKTests extends OpenSearchTestCase { } private static PrecisionAtK copy(PrecisionAtK original) { - return new PrecisionAtK(original.getRelevantRatingThreshold(), original.getIgnoreUnlabeled(), - original.forcedSearchSize().getAsInt()); + return new PrecisionAtK( + original.getRelevantRatingThreshold(), + original.getIgnoreUnlabeled(), + original.forcedSearchSize().getAsInt() + ); } private static PrecisionAtK mutate(PrecisionAtK original) { PrecisionAtK pAtK; switch (randomIntBetween(0, 2)) { - case 0: - pAtK = new PrecisionAtK(original.getRelevantRatingThreshold(), !original.getIgnoreUnlabeled(), - original.forcedSearchSize().getAsInt()); - break; - case 1: - pAtK = new PrecisionAtK(randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)), - original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt()); - break; - case 2: - pAtK = new PrecisionAtK(original.getRelevantRatingThreshold(), - original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt() + 1); - break; - default: - throw new IllegalStateException("The test should only allow three parameters mutated"); + case 0: + pAtK = new PrecisionAtK( + original.getRelevantRatingThreshold(), + !original.getIgnoreUnlabeled(), + original.forcedSearchSize().getAsInt() + ); + break; + case 1: + pAtK = new PrecisionAtK( + randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)), + original.getIgnoreUnlabeled(), + original.forcedSearchSize().getAsInt() + ); + break; + case 2: + pAtK = new PrecisionAtK( + original.getRelevantRatingThreshold(), + original.getIgnoreUnlabeled(), + original.forcedSearchSize().getAsInt() + 1 + ); + break; + default: + throw new IllegalStateException("The test should only allow three parameters mutated"); } return pAtK; } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalRequestTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalRequestTests.java index c84d5376806..401bb1b2a7b 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalRequestTests.java +++ 
b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalRequestTests.java @@ -68,13 +68,20 @@ public class RankEvalRequestTests extends AbstractWireSerializingTestCase mutators = new ArrayList<>(); mutators.add(() -> mutation.indices(ArrayUtils.concat(instance.indices(), new String[] { randomAlphaOfLength(10) }))); - mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(instance.indicesOptions(), - () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())))); + mutators.add( + () -> mutation.indicesOptions( + randomValueOtherThan( + instance.indicesOptions(), + () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) + ) + ) + ); mutators.add(() -> { if (instance.searchType() == SearchType.DFS_QUERY_THEN_FETCH) { mutation.searchType(SearchType.QUERY_THEN_FETCH); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java index e3ae1b2b764..7e81dde0cab 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java @@ -78,23 +78,30 @@ import static org.hamcrest.Matchers.instanceOf; public class RankEvalResponseTests extends OpenSearchTestCase { private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] { - new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)), - new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT), - new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)), - new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), - new SearchPhaseExecutionException("search", "all shards failed", - new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) }), - new OpenSearchException("Parsing failed", - new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value"))) }; + new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)), + new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT), + new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)), + new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), + new SearchPhaseExecutionException( + "search", + "all shards failed", + new ShardSearchFailure[] { + new ShardSearchFailure( + new ParsingException(1, 2, "foobar", null), + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE) + ) } + ), + new OpenSearchException( + "Parsing failed", + new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value")) + ) }; private static RankEvalResponse createRandomResponse() { int numberOfRequests = randomIntBetween(0, 5); Map partials = new HashMap<>(numberOfRequests); for (int i = 0; i < numberOfRequests; i++) { String id = randomAlphaOfLengthBetween(3, 10); - EvalQueryQuality evalQuality = new EvalQueryQuality(id, - randomDoubleBetween(0.0, 1.0, true)); + EvalQueryQuality evalQuality = new EvalQueryQuality(id, randomDoubleBetween(0.0, 1.0, true)); int numberOfDocs = randomIntBetween(0, 5); List ratedHits = 
new ArrayList<>(numberOfDocs); for (int d = 0; d < numberOfDocs; d++) { @@ -144,13 +151,16 @@ public class RankEvalResponseTests extends OpenSearchTestCase { } assertNotSame(testItem, parsedItem); // We cannot check equality of object here because some information (e.g. - // SearchHit#shard) cannot fully be parsed back. + // SearchHit#shard) cannot fully be parsed back. assertEquals(testItem.getMetricScore(), parsedItem.getMetricScore(), 0.0); assertEquals(testItem.getPartialResults().keySet(), parsedItem.getPartialResults().keySet()); for (EvalQueryQuality metricDetail : testItem.getPartialResults().values()) { EvalQueryQuality parsedEvalQueryQuality = parsedItem.getPartialResults().get(metricDetail.getId()); - assertToXContentEquivalent(toXContent(metricDetail, xContentType, humanReadable), - toXContent(parsedEvalQueryQuality, xContentType, humanReadable), xContentType); + assertToXContentEquivalent( + toXContent(metricDetail, xContentType, humanReadable), + toXContent(parsedEvalQueryQuality, xContentType, humanReadable), + xContentType + ); } // Also exceptions that are parsed back will be different since they are re-wrapped during parsing. // However, we can check that there is the expected number @@ -164,38 +174,49 @@ public class RankEvalResponseTests extends OpenSearchTestCase { public void testToXContent() throws IOException { EvalQueryQuality coffeeQueryQuality = new EvalQueryQuality("coffee_query", 0.1); coffeeQueryQuality.addHitsAndRatings(Arrays.asList(searchHit("index", 123, 5), searchHit("index", 456, null))); - RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality), - Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))); + RankEvalResponse response = new RankEvalResponse( + 0.123, + Collections.singletonMap("coffee_query", coffeeQueryQuality), + Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg")) + ); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString(); - assertEquals(("{" + - " \"metric_score\": 0.123," + - " \"details\": {" + - " \"coffee_query\": {" + - " \"metric_score\": 0.1," + - " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + - " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0}," + - " \"rating\":5}," + - " {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0}," + - " \"rating\":null}" + - " ]" + - " }" + - " }," + - " \"failures\": {" + - " \"beer_query\": {" + - " \"error\" : {\"root_cause\": [{\"type\":\"parsing_exception\", \"reason\":\"someMsg\",\"line\":0,\"col\":0}]," + - " \"type\":\"parsing_exception\"," + - " \"reason\":\"someMsg\"," + - " \"line\":0,\"col\":0" + - " }" + - " }" + - " }" + - "}").replaceAll("\\s+", ""), xContent); + assertEquals( + ("{" + + " \"metric_score\": 0.123," + + " \"details\": {" + + " \"coffee_query\": {" + + " \"metric_score\": 0.1," + + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0}," + + " \"rating\":5}," + + " {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0}," + + " \"rating\":null}" + + " ]" + + " }" + + " }," + + " \"failures\": {" + + " \"beer_query\": {" + + " \"error\" : {\"root_cause\": 
[{\"type\":\"parsing_exception\", \"reason\":\"someMsg\",\"line\":0,\"col\":0}]," + + " \"type\":\"parsing_exception\"," + + " \"reason\":\"someMsg\"," + + " \"line\":0,\"col\":0" + + " }" + + " }" + + " }" + + "}").replaceAll("\\s+", ""), + xContent + ); } private static RatedSearchHit searchHit(String index, int docId, Integer rating) { - SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME), - Collections.emptyMap(), Collections.emptyMap()); + SearchHit hit = new SearchHit( + docId, + docId + "", + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty()); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java index 98b335e2b5f..b79b26eb0af 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java @@ -84,11 +84,14 @@ public class RankEvalSpecTests extends OpenSearchTestCase { } static RankEvalSpec createTestItem() { - Supplier metric = randomFrom(Arrays.asList( + Supplier metric = randomFrom( + Arrays.asList( () -> PrecisionAtKTests.createTestItem(), () -> RecallAtKTests.createTestItem(), () -> MeanReciprocalRankTests.createTestItem(), - () -> DiscountedCumulativeGainTests.createTestItem())); + () -> DiscountedCumulativeGainTests.createTestItem() + ) + ); List ratedRequests = null; Collection templates = null; @@ -111,12 +114,19 @@ public class RankEvalSpecTests extends OpenSearchTestCase { Map templateParams = new HashMap<>(); templateParams.put("key", "value"); - RatedRequest ratedRequest = new RatedRequest("id", Arrays.asList(RatedDocumentTests.createRatedDocument()), templateParams, - "templateId"); + RatedRequest ratedRequest = new RatedRequest( + "id", + Arrays.asList(RatedDocumentTests.createRatedDocument()), + templateParams, + "templateId" + ); ratedRequests = Arrays.asList(ratedRequest); } else { - RatedRequest ratedRequest = new RatedRequest("id", Arrays.asList(RatedDocumentTests.createRatedDocument()), - new SearchSourceBuilder()); + RatedRequest ratedRequest = new RatedRequest( + "id", + Arrays.asList(RatedDocumentTests.createRatedDocument()), + new SearchSourceBuilder() + ); ratedRequests = Arrays.asList(ratedRequest); } RankEvalSpec spec = new RankEvalSpec(ratedRequests, metric.get(), templates); @@ -165,7 +175,8 @@ public class RankEvalSpecTests extends OpenSearchTestCase { namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, PrecisionAtK.NAME, PrecisionAtK::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, RecallAtK.NAME, RecallAtK::new)); namedWriteables.add( - new NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new)); + new NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new) + ); namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, MeanReciprocalRank.NAME, MeanReciprocalRank::new)); return OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(namedWriteables), RankEvalSpec::new); } @@ -181,22 +192,22 
@@ public class RankEvalSpecTests extends OpenSearchTestCase { int mutate = randomIntBetween(0, 2); switch (mutate) { - case 0: - RatedRequest request = RatedRequestsTests.createTestItem(true); - ratedRequests.add(request); - break; - case 1: - if (metric instanceof PrecisionAtK) { - metric = new DiscountedCumulativeGain(); - } else { - metric = new PrecisionAtK(); - } - break; - case 2: - templates.put("mutation", new Script(ScriptType.INLINE, "mustache", randomAlphaOfLength(10), new HashMap<>())); - break; - default: - throw new IllegalStateException("Requested to modify more than available parameters."); + case 0: + RatedRequest request = RatedRequestsTests.createTestItem(true); + ratedRequests.add(request); + break; + case 1: + if (metric instanceof PrecisionAtK) { + metric = new DiscountedCumulativeGain(); + } else { + metric = new PrecisionAtK(); + } + break; + case 2: + templates.put("mutation", new Script(ScriptType.INLINE, "mustache", randomAlphaOfLength(10), new HashMap<>())); + break; + default: + throw new IllegalStateException("Requested to modify more than available parameters."); } List scripts = new ArrayList<>(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java index 524828dfb75..f3fb01b153c 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java @@ -80,17 +80,22 @@ public class RatedDocumentTests extends OpenSearchTestCase { public void testSerialization() throws IOException { RatedDocument original = createRatedDocument(); - RatedDocument deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - RatedDocument::new); + RatedDocument deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + RatedDocument::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } public void testEqualsAndHash() throws IOException { - checkEqualsAndHashCode(createRatedDocument(), original -> { - return new RatedDocument(original.getIndex(), original.getDocID(), original.getRating()); - }, RatedDocumentTests::mutateTestItem); + checkEqualsAndHashCode( + createRatedDocument(), + original -> { return new RatedDocument(original.getIndex(), original.getDocID(), original.getRating()); }, + RatedDocumentTests::mutateTestItem + ); } private static RatedDocument mutateTestItem(RatedDocument original) { @@ -99,17 +104,17 @@ public class RatedDocumentTests extends OpenSearchTestCase { String docId = original.getDocID(); switch (randomIntBetween(0, 2)) { - case 0: - rating = randomValueOtherThan(rating, () -> randomInt()); - break; - case 1: - index = randomValueOtherThan(index, () -> randomAlphaOfLength(10)); - break; - case 2: - docId = randomValueOtherThan(docId, () -> randomAlphaOfLength(10)); - break; - default: - throw new IllegalStateException("The test should only allow two parameters mutated"); + case 0: + rating = randomValueOtherThan(rating, () -> randomInt()); + break; + case 1: + index = randomValueOtherThan(index, () -> randomAlphaOfLength(10)); + break; + case 2: + docId = randomValueOtherThan(docId, () -> randomAlphaOfLength(10)); + break; + default: + throw new IllegalStateException("The test should only allow two 
parameters mutated"); } return new RatedDocument(index, docId, rating); } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java index 8c618140b5c..900f6dd7112 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java @@ -77,8 +77,10 @@ public class RatedRequestsTests extends OpenSearchTestCase { @BeforeClass public static void init() { xContentRegistry = new NamedXContentRegistry( - Stream.of(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents().stream()).flatMap(Function.identity()) - .collect(toList())); + Stream.of(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents().stream()) + .flatMap(Function.identity()) + .collect(toList()) + ); } @AfterClass @@ -187,34 +189,34 @@ public class RatedRequestsTests extends OpenSearchTestCase { int mutate = randomIntBetween(0, 3); switch (mutate) { - case 0: - id = randomValueOtherThan(id, () -> randomAlphaOfLength(10)); - break; - case 1: - if (evaluationRequest != null) { - int size = randomValueOtherThan(evaluationRequest.size(), () -> randomInt(Integer.MAX_VALUE)); - evaluationRequest = new SearchSourceBuilder(); - evaluationRequest.size(size); - evaluationRequest.query(new MatchAllQueryBuilder()); - } else { - if (randomBoolean()) { - Map mutated = new HashMap<>(); - mutated.putAll(params); - mutated.put("one_more_key", "one_more_value"); - params = mutated; + case 0: + id = randomValueOtherThan(id, () -> randomAlphaOfLength(10)); + break; + case 1: + if (evaluationRequest != null) { + int size = randomValueOtherThan(evaluationRequest.size(), () -> randomInt(Integer.MAX_VALUE)); + evaluationRequest = new SearchSourceBuilder(); + evaluationRequest.size(size); + evaluationRequest.query(new MatchAllQueryBuilder()); } else { - templateId = randomValueOtherThan(templateId, () -> randomAlphaOfLength(5)); + if (randomBoolean()) { + Map mutated = new HashMap<>(); + mutated.putAll(params); + mutated.put("one_more_key", "one_more_value"); + params = mutated; + } else { + templateId = randomValueOtherThan(templateId, () -> randomAlphaOfLength(5)); + } } - } - break; - case 2: - ratedDocs = Arrays.asList(randomValueOtherThanMany(ratedDocs::contains, () -> RatedDocumentTests.createRatedDocument())); - break; - case 3: - summaryFields = Arrays.asList(randomValueOtherThanMany(summaryFields::contains, () -> randomAlphaOfLength(10))); - break; - default: - throw new IllegalStateException("Requested to modify more than available parameters."); + break; + case 2: + ratedDocs = Arrays.asList(randomValueOtherThanMany(ratedDocs::contains, () -> RatedDocumentTests.createRatedDocument())); + break; + case 3: + summaryFields = Arrays.asList(randomValueOtherThanMany(summaryFields::contains, () -> randomAlphaOfLength(10))); + break; + default: + throw new IllegalStateException("Requested to modify more than available parameters."); } RatedRequest ratedRequest; @@ -230,15 +232,21 @@ public class RatedRequestsTests extends OpenSearchTestCase { public void testDuplicateRatedDocThrowsException() { List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1), new RatedDocument("index1", "id1", 5)); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> new RatedRequest("test_query", ratedDocs, new SearchSourceBuilder())); - 
assertEquals("Found duplicate rated document key [{\"_index\":\"index1\",\"_id\":\"id1\"}] in evaluation request [test_query]", - ex.getMessage()); + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new RatedRequest("test_query", ratedDocs, new SearchSourceBuilder()) + ); + assertEquals( + "Found duplicate rated document key [{\"_index\":\"index1\",\"_id\":\"id1\"}] in evaluation request [test_query]", + ex.getMessage() + ); Map params = new HashMap<>(); params.put("key", "value"); ex = expectThrows(IllegalArgumentException.class, () -> new RatedRequest("test_query", ratedDocs, params, "templateId")); - assertEquals("Found duplicate rated document key [{\"_index\":\"index1\",\"_id\":\"id1\"}] in evaluation request [test_query]", - ex.getMessage()); + assertEquals( + "Found duplicate rated document key [{\"_index\":\"index1\",\"_id\":\"id1\"}] in evaluation request [test_query]", + ex.getMessage() + ); } public void testNullSummaryFieldsTreatment() { @@ -298,15 +306,19 @@ public class RatedRequestsTests extends OpenSearchTestCase { public void testExplainNotAllowed() { List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().explain(true))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().explain(true)) + ); assertEquals("Query in rated requests should not use explain.", e.getMessage()); } public void testProfileNotAllowed() { List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().profile(true))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().profile(true)) + ); assertEquals("Query in rated requests should not use profile.", e.getMessage()); } @@ -316,26 +328,26 @@ public class RatedRequestsTests extends OpenSearchTestCase { */ public void testParseFromXContent() throws IOException { String querySpecString = " {\n" - + " \"id\": \"my_qa_query\",\n" - + " \"request\": {\n" - + " \"query\": {\n" - + " \"bool\": {\n" - + " \"must\": [\n" - + " {\"match\": {\"beverage\": \"coffee\"}},\n" - + " {\"term\": {\"browser\": {\"value\": \"safari\"}}},\n" - + " {\"term\": {\"time_of_day\": " - + " {\"value\": \"morning\",\"boost\": 2}}},\n" - + " {\"term\": {\"ip_location\": " - + " {\"value\": \"ams\",\"boost\": 10}}}]}\n" - + " },\n" - + " \"size\": 10\n" - + " },\n" - + " \"summary_fields\" : [\"title\"],\n" - + " \"ratings\": [\n" - + " {\"_index\": \"test\" , \"_id\": \"1\", \"rating\" : 1 },\n" - + " {\"_index\": \"test\", \"rating\" : 0, \"_id\": \"2\"},\n" - + " {\"_id\": \"3\", \"_index\": \"test\", \"rating\" : 1} ]" - + "}\n"; + + " \"id\": \"my_qa_query\",\n" + + " \"request\": {\n" + + " \"query\": {\n" + + " \"bool\": {\n" + + " \"must\": [\n" + + " {\"match\": {\"beverage\": \"coffee\"}},\n" + + " {\"term\": {\"browser\": {\"value\": \"safari\"}}},\n" + + " {\"term\": {\"time_of_day\": " + + " {\"value\": \"morning\",\"boost\": 2}}},\n" + + " {\"term\": {\"ip_location\": " + + " {\"value\": \"ams\",\"boost\": 10}}}]}\n" + + " },\n" + + " \"size\": 10\n" + + " },\n" + + " \"summary_fields\" : [\"title\"],\n" + + " \"ratings\": [\n" + + " 
{\"_index\": \"test\" , \"_id\": \"1\", \"rating\" : 1 },\n" + + " {\"_index\": \"test\", \"rating\" : 0, \"_id\": \"2\"},\n" + + " {\"_id\": \"3\", \"_index\": \"test\", \"rating\" : 1} ]" + + "}\n"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, querySpecString)) { RatedRequest specification = RatedRequest.fromXContent(parser); assertEquals("my_qa_query", specification.getId()); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java index 89426ab91e5..bfc9098f59e 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedSearchHitTests.java @@ -51,10 +51,14 @@ import static org.opensearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode public class RatedSearchHitTests extends OpenSearchTestCase { public static RatedSearchHit randomRatedSearchHit() { - OptionalInt rating = randomBoolean() ? OptionalInt.empty() - : OptionalInt.of(randomIntBetween(0, 5)); - SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); + OptionalInt rating = randomBoolean() ? OptionalInt.empty() : OptionalInt.of(randomIntBetween(0, 5)); + SearchHit searchHit = new SearchHit( + randomIntBetween(0, 10), + randomAlphaOfLength(10), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating); return ratedSearchHit; } @@ -63,15 +67,20 @@ public class RatedSearchHitTests extends OpenSearchTestCase { OptionalInt rating = original.getRating(); SearchHit hit = original.getSearchHit(); switch (randomIntBetween(0, 1)) { - case 0: - rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); - break; - case 1: - hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - break; - default: - throw new IllegalStateException("The test should only allow two parameters mutated"); + case 0: + rating = rating.isPresent() ? 
OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); + break; + case 1: + hit = new SearchHit( + hit.docId(), + hit.getId() + randomAlphaOfLength(10), + new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap(), + Collections.emptyMap() + ); + break; + default: + throw new IllegalStateException("The test should only allow two parameters mutated"); } return new RatedSearchHit(hit, rating); } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java index 727aab72c30..6ffaaa8e89e 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java @@ -101,7 +101,7 @@ public class RecallAtKTests extends OpenSearchTestCase { RecallAtK recallAtN = new RecallAtK(2, 5); - EvalQueryQuality evaluated = recallAtN.evaluate("id", toSearchHits(rated.subList(0,3), "test"), rated); + EvalQueryQuality evaluated = recallAtN.evaluate("id", toSearchHits(rated.subList(0, 3), "test"), rated); assertEquals((double) 1 / 3, evaluated.metricScore(), 0.00001); assertEquals(1, ((RecallAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(3, ((RecallAtK.Detail) evaluated.getMetricDetails()).getRelevant()); @@ -213,8 +213,11 @@ public class RecallAtKTests extends OpenSearchTestCase { public void testSerialization() throws IOException { RecallAtK original = createTestItem(); - RecallAtK deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), - RecallAtK::new); + RecallAtK deserialized = OpenSearchTestCase.copyWriteable( + original, + new NamedWriteableRegistry(Collections.emptyList()), + RecallAtK::new + ); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); @@ -234,12 +237,11 @@ public class RecallAtKTests extends OpenSearchTestCase { case 0: recallAtK = new RecallAtK( randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)), - original.forcedSearchSize().getAsInt()); + original.forcedSearchSize().getAsInt() + ); break; case 1: - recallAtK = new RecallAtK( - original.getRelevantRatingThreshold(), - original.forcedSearchSize().getAsInt() + 1); + recallAtK = new RecallAtK(original.getRelevantRatingThreshold(), original.forcedSearchSize().getAsInt() + 1); break; default: throw new IllegalStateException("The test should only allow two parameters mutated"); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/TransportRankEvalActionTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/TransportRankEvalActionTests.java index 174fb7cfeff..b2052bcb792 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/TransportRankEvalActionTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/TransportRankEvalActionTests.java @@ -55,8 +55,11 @@ import static org.mockito.Mockito.mock; public class TransportRankEvalActionTests extends OpenSearchTestCase { - private Settings settings = Settings.builder().put("path.home", createTempDir().toString()).put("node.name", "test-" + getTestName()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); + private Settings settings = Settings.builder() + .put("path.home", createTempDir().toString()) + .put("node.name", "test-" + 
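For reference, the RecallAtK(2, 5) case above asserts a score of 1/3 with one relevant document retrieved out of three relevant overall. A small self-contained sketch of recall at k with a relevance threshold; the ratings below are hypothetical and chosen only to reproduce the same 1/3 ratio:

import java.util.List;

final class RecallAtKSketch {
    // recall@k = (relevant documents among the first k hits) / (all relevant documents)
    static double recallAt(List<Integer> retrievedRatings, List<Integer> allRatings, int threshold, int k) {
        long relevantRetrieved = retrievedRatings.stream().limit(k).filter(r -> r >= threshold).count();
        long relevant = allRatings.stream().filter(r -> r >= threshold).count();
        return relevant == 0 ? 0.0 : (double) relevantRetrieved / relevant;
    }

    public static void main(String[] args) {
        // Hypothetical judgements: three documents in the judged set are rated >= 2,
        // but only one of them appears among the first three hits, so recall is 1/3.
        List<Integer> all = List.of(3, 0, 1, 2, 4);
        List<Integer> retrievedTop3 = List.of(3, 0, 1); // ratings of the top 3 hits, in rank order
        System.out.println(recallAt(retrievedTop3, all, 2, 5));
    }
}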
getTestName()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); /** * Test that request parameters like indicesOptions or searchType from ranking evaluation request are transfered to msearch request @@ -64,14 +67,25 @@ public class TransportRankEvalActionTests extends OpenSearchTestCase { public void testTransferRequestParameters() throws Exception { String indexName = "test_index"; List specifications = new ArrayList<>(); - specifications - .add(new RatedRequest("amsterdam_query", Arrays.asList(new RatedDocument(indexName, "1", 3)), new SearchSourceBuilder())); - RankEvalRequest rankEvalRequest = new RankEvalRequest(new RankEvalSpec(specifications, new DiscountedCumulativeGain()), - new String[] { indexName }); + specifications.add( + new RatedRequest("amsterdam_query", Arrays.asList(new RatedDocument(indexName, "1", 3)), new SearchSourceBuilder()) + ); + RankEvalRequest rankEvalRequest = new RankEvalRequest( + new RankEvalSpec(specifications, new DiscountedCumulativeGain()), + new String[] { indexName } + ); SearchType expectedSearchType = randomFrom(SearchType.CURRENTLY_SUPPORTED); rankEvalRequest.searchType(expectedSearchType); - IndicesOptions expectedIndicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), - randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); + IndicesOptions expectedIndicesOptions = IndicesOptions.fromOptions( + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean() + ); rankEvalRequest.indicesOptions(expectedIndicesOptions); NodeClient client = new NodeClient(settings, null) { @@ -79,13 +93,18 @@ public class TransportRankEvalActionTests extends OpenSearchTestCase { public void multiSearch(MultiSearchRequest request, ActionListener listener) { assertEquals(1, request.requests().size()); assertEquals(expectedSearchType, request.requests().get(0).searchType()); - assertArrayEquals(new String[]{indexName}, request.requests().get(0).indices()); + assertArrayEquals(new String[] { indexName }, request.requests().get(0).indices()); assertEquals(expectedIndicesOptions, request.requests().get(0).indicesOptions()); } }; - TransportRankEvalAction action = new TransportRankEvalAction(mock(ActionFilters.class), client, mock(TransportService.class), - mock(ScriptService.class), NamedXContentRegistry.EMPTY); + TransportRankEvalAction action = new TransportRankEvalAction( + mock(ActionFilters.class), + client, + mock(TransportService.class), + mock(ScriptService.class), + NamedXContentRegistry.EMPTY + ); action.doExecute(null, rankEvalRequest, null); } } diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java index 0745036b35a..b3dac95125f 100644 --- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java @@ -302,9 +302,14 @@ public class ReindexDocumentationIT extends OpenSearchIntegTestCase { final int numDocs = randomIntBetween(10, 100); ALLOWED_OPERATIONS.release(numDocs); - indexRandom(true, false, true, IntStream.range(0, numDocs) - .mapToObj(i -> client().prepareIndex(INDEX_NAME, "_doc", Integer.toString(i)).setSource("n", 
Integer.toString(i))) - .collect(Collectors.toList())); + indexRandom( + true, + false, + true, + IntStream.range(0, numDocs) + .mapToObj(i -> client().prepareIndex(INDEX_NAME, "_doc", Integer.toString(i)).setSource("n", Integer.toString(i))) + .collect(Collectors.toList()) + ); // Checks that the all documents have been indexed and correctly counted assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0).get(), numDocs); @@ -323,12 +328,10 @@ public class ReindexDocumentationIT extends OpenSearchIntegTestCase { builder.execute(); // 10 seconds is usually fine but on heavily loaded machines this can take a while - assertBusy( - () -> { - assertTrue("Expected some queued threads", ALLOWED_OPERATIONS.hasQueuedThreads()); - assertEquals("Expected that no permits are available", 0, ALLOWED_OPERATIONS.availablePermits()); - }, - 1, TimeUnit.MINUTES); + assertBusy(() -> { + assertTrue("Expected some queued threads", ALLOWED_OPERATIONS.hasQueuedThreads()); + assertEquals("Expected that no permits are available", 0, ALLOWED_OPERATIONS.availablePermits()); + }, 1, TimeUnit.MINUTES); return builder; } diff --git a/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ManyDocumentsIT.java b/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ManyDocumentsIT.java index 5f064d1395b..49205a725ba 100644 --- a/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ManyDocumentsIT.java +++ b/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ManyDocumentsIT.java @@ -66,14 +66,15 @@ public class ManyDocumentsIT extends OpenSearchRestTestCase { public void testReindex() throws IOException { Request request = new Request("POST", "/_reindex"); request.setJsonEntity( - "{\n" + - " \"source\":{\n" + - " \"index\":\"test\"\n" + - " },\n" + - " \"dest\":{\n" + - " \"index\":\"des\"\n" + - " }\n" + - "}"); + "{\n" + + " \"source\":{\n" + + " \"index\":\"test\"\n" + + " },\n" + + " \"dest\":{\n" + + " \"index\":\"des\"\n" + + " }\n" + + "}" + ); Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("created", count)); @@ -84,43 +85,48 @@ public class ManyDocumentsIT extends OpenSearchRestTestCase { nodesInfo = (Map) nodesInfo.get("nodes"); Map nodeInfo = (Map) nodesInfo.values().iterator().next(); Map http = (Map) nodeInfo.get("http"); - String remote = "http://"+ http.get("publish_address"); + String remote = "http://" + http.get("publish_address"); Request request = new Request("POST", "/_reindex"); if (randomBoolean()) { request.setJsonEntity( - "{\n" + - " \"source\":{\n" + - " \"index\":\"test\",\n" + - " \"remote\":{\n" + - " \"host\":\"" + remote + "\"\n" + - " }\n" + - " }\n," + - " \"dest\":{\n" + - " \"index\":\"des\"\n" + - " }\n" + - "}"); + "{\n" + + " \"source\":{\n" + + " \"index\":\"test\",\n" + + " \"remote\":{\n" + + " \"host\":\"" + + remote + + "\"\n" + + " }\n" + + " }\n," + + " \"dest\":{\n" + + " \"index\":\"des\"\n" + + " }\n" + + "}" + ); } else { // Test with external version_type request.setJsonEntity( - "{\n" + - " \"source\":{\n" + - " \"index\":\"test\",\n" + - " \"remote\":{\n" + - " \"host\":\"" + remote + "\"\n" + - " }\n" + - " }\n," + - " \"dest\":{\n" + - " \"index\":\"des\",\n" + - " \"version_type\": \"external\"\n" + - " }\n" + - "}"); + "{\n" + + " \"source\":{\n" + + " \"index\":\"test\",\n" + + " \"remote\":{\n" + + " \"host\":\"" + + remote + + "\"\n" + + " }\n" + + " }\n," + + " \"dest\":{\n" + + " \"index\":\"des\",\n" + + " 
\"version_type\": \"external\"\n" + + " }\n" + + "}" + ); } Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("created", count)); } - public void testUpdateByQuery() throws IOException { Map response = entityAsMap(client().performRequest(new Request("POST", "/test/_update_by_query"))); assertThat(response, hasEntry("total", count)); @@ -129,12 +135,7 @@ public class ManyDocumentsIT extends OpenSearchRestTestCase { public void testDeleteByQuery() throws IOException { Request request = new Request("POST", "/test/_delete_by_query"); - request.setJsonEntity( - "{\n" + - " \"query\":{\n" + - " \"match_all\": {}\n" + - " }\n" + - "}"); + request.setJsonEntity("{\n" + " \"query\":{\n" + " \"match_all\": {}\n" + " }\n" + "}"); Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("deleted", count)); diff --git a/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ReindexWithoutContentIT.java b/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ReindexWithoutContentIT.java index 9d296ab5678..da6de97452d 100644 --- a/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ReindexWithoutContentIT.java +++ b/modules/reindex/src/javaRestTest/java/org/opensearch/index/reindex/ReindexWithoutContentIT.java @@ -43,8 +43,10 @@ import static org.hamcrest.CoreMatchers.containsString; public class ReindexWithoutContentIT extends OpenSearchRestTestCase { public void testReindexMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - new Request("POST", "/_reindex"))); + ResponseException responseException = expectThrows( + ResponseException.class, + () -> client().performRequest(new Request("POST", "/_reindex")) + ); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), containsString("request body is required")); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 9b031e98700..07d67290d8f 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -62,13 +62,7 @@ import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; -import org.opensearch.index.reindex.AbstractBulkByScrollRequest; -import org.opensearch.index.reindex.BulkByScrollResponse; -import org.opensearch.index.reindex.BulkByScrollTask; -import org.opensearch.index.reindex.ClientScrollableHitSource; -import org.opensearch.index.reindex.ScrollableHitSource; import org.opensearch.index.reindex.ScrollableHitSource.SearchFailure; -import org.opensearch.index.reindex.WorkerBulkByScrollTaskState; import org.opensearch.script.Script; import org.opensearch.script.ScriptService; import org.opensearch.script.UpdateScript; @@ -105,7 +99,8 @@ import static org.opensearch.search.sort.SortBuilders.fieldSort; * Abstract base for scrolling across a search and executing bulk actions on all results. 
All package private methods are package private so * their tests can use them. Most methods run in the listener thread pool because the are meant to be fast and don't expect to block. */ -public abstract class AbstractAsyncBulkByScrollAction, +public abstract class AbstractAsyncBulkByScrollAction< + Request extends AbstractBulkByScrollRequest, Action extends TransportAction> { protected final Logger logger; @@ -138,19 +133,46 @@ public abstract class AbstractAsyncBulkByScrollAction, ScrollableHitSource.Hit, RequestWrapper> scriptApplier; private int lastBatchSize; - AbstractAsyncBulkByScrollAction(BulkByScrollTask task, boolean needsSourceDocumentVersions, - boolean needsSourceDocumentSeqNoAndPrimaryTerm, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, ActionListener listener, - @Nullable ScriptService scriptService, @Nullable ReindexSslConfig sslConfig) { - this(task, needsSourceDocumentVersions, needsSourceDocumentSeqNoAndPrimaryTerm, logger, client, - threadPool, mainRequest, listener, scriptService, sslConfig, Optional.empty()); + AbstractAsyncBulkByScrollAction( + BulkByScrollTask task, + boolean needsSourceDocumentVersions, + boolean needsSourceDocumentSeqNoAndPrimaryTerm, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + Request mainRequest, + ActionListener listener, + @Nullable ScriptService scriptService, + @Nullable ReindexSslConfig sslConfig + ) { + this( + task, + needsSourceDocumentVersions, + needsSourceDocumentSeqNoAndPrimaryTerm, + logger, + client, + threadPool, + mainRequest, + listener, + scriptService, + sslConfig, + Optional.empty() + ); } - AbstractAsyncBulkByScrollAction(BulkByScrollTask task, boolean needsSourceDocumentVersions, - boolean needsSourceDocumentSeqNoAndPrimaryTerm, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, ActionListener listener, - @Nullable ScriptService scriptService, @Nullable ReindexSslConfig sslConfig, - Optional interceptor) { + AbstractAsyncBulkByScrollAction( + BulkByScrollTask task, + boolean needsSourceDocumentVersions, + boolean needsSourceDocumentSeqNoAndPrimaryTerm, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + Request mainRequest, + ActionListener listener, + @Nullable ScriptService scriptService, + @Nullable ReindexSslConfig sslConfig, + Optional interceptor + ) { this.task = task; this.scriptService = scriptService; this.sslConfig = sslConfig; @@ -246,16 +268,27 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, - List searchFailures, boolean timedOut) { + protected BulkByScrollResponse buildResponse( + TimeValue took, + List indexingFailures, + List searchFailures, + boolean timedOut + ) { return new BulkByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } @@ -298,10 +331,9 @@ public abstract class AbstractAsyncBulkByScrollAction 0) - // Timeouts aren't shard failures but we still need to pass them back to the user. - || response.isTimedOut() - ) { + (response.getFailures().size() > 0) + // Timeouts aren't shard failures but we still need to pass them back to the user. 
+ || response.isTimedOut()) { refreshAndFinish(emptyList(), response.getFailures(), response.isTimedOut()); return; } @@ -373,8 +405,12 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, - List searchFailures, boolean timedOut) { + protected void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { logger.debug("[{}]: finishing without any catastrophic failures", task.getId()); scrollSource.close(() -> { if (failure == null) { BulkByScrollResponse response = buildResponse( - timeValueNanos(System.nanoTime() - startTime.get()), - indexingFailures, searchFailures, timedOut); + timeValueNanos(System.nanoTime() - startTime.get()), + indexingFailures, + searchFailures, + timedOut + ); listener.onResponse(response); } else { listener.onFailure(failure); @@ -772,10 +810,12 @@ public abstract class AbstractAsyncBulkByScrollAction params; - public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, - ScriptService scriptService, - Script script, - Map params) { + public ScriptApplier( + WorkerBulkByScrollTaskState taskWorker, + ScriptService scriptService, + Script script, + Map params + ) { this.taskWorker = taskWorker; this.scriptService = scriptService; this.script = script; @@ -855,17 +895,17 @@ public abstract class AbstractAsyncBulkByScrollAction scriptChangedOpType(RequestWrapper request, OpType oldOpType, OpType newOpType) { switch (newOpType) { - case NOOP: - taskWorker.countNoop(); - return null; - case DELETE: - RequestWrapper delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId())); - delete.setVersion(request.getVersion()); - delete.setVersionType(VersionType.INTERNAL); - delete.setRouting(request.getRouting()); - return delete; - default: - throw new IllegalArgumentException("Unsupported operation type change from [" + oldOpType + "] to [" + newOpType + "]"); + case NOOP: + taskWorker.countNoop(); + return null; + case DELETE: + RequestWrapper delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId())); + delete.setVersion(request.getVersion()); + delete.setVersionType(VersionType.INTERNAL); + delete.setRouting(request.getRouting()); + return delete; + default: + throw new IllegalArgumentException("Unsupported operation type change from [" + oldOpType + "] to [" + newOpType + "]"); } } @@ -903,8 +943,9 @@ public abstract class AbstractAsyncBulkByScrollAction, - A extends ActionType - > extends BaseRestHandler { + Request extends AbstractBulkByScrollRequest, + A extends ActionType> extends BaseRestHandler { private final A action; @@ -60,8 +59,8 @@ public abstract class AbstractBaseReindexRestHandler< this.action = action; } - protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, - boolean includeCreated, boolean includeUpdated) throws IOException { + protected RestChannelConsumer doPrepareRequest(RestRequest request, NodeClient client, boolean includeCreated, boolean includeUpdated) + throws IOException { // Build the internal request Request internal = setCommonOptions(request, buildRequest(request, client.getNamedWriteableRegistry())); @@ -151,12 +150,15 @@ public abstract class AbstractBaseReindexRestHandler< slices = Integer.parseInt(slicesString); } catch (NumberFormatException e) { throw new IllegalArgumentException( - "[slices] must be a positive integer or the string \"auto\", but was [" + slicesString + "]", e); + "[slices] must be a positive integer or the string \"auto\", but was [" + slicesString + "]", + e + ); } 
if (slices < 1) { throw new IllegalArgumentException( - "[slices] must be a positive integer or the string \"auto\", but was [" + slicesString + "]"); + "[slices] must be a positive integer or the string \"auto\", but was [" + slicesString + "]" + ); } return slices; @@ -174,24 +176,23 @@ public abstract class AbstractBaseReindexRestHandler< try { requestsPerSecond = Float.parseFloat(requestsPerSecondString); } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.", e); + throw new IllegalArgumentException("[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.", e); } if (requestsPerSecond == -1) { return Float.POSITIVE_INFINITY; } if (requestsPerSecond <= 0) { // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of -1 - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling."); + throw new IllegalArgumentException("[requests_per_second] must be a float greater than 0. Use -1 to disable throttling."); } return requestsPerSecond; } static void setMaxDocsValidateIdentical(AbstractBulkByScrollRequest request, int maxDocs) { if (request.getMaxDocs() != AbstractBulkByScrollRequest.MAX_DOCS_ALL_MATCHES && request.getMaxDocs() != maxDocs) { - throw new IllegalArgumentException("[max_docs] set to two different values [" + request.getMaxDocs() + "]" + - " and [" + maxDocs + "]"); + throw new IllegalArgumentException( + "[max_docs] set to two different values [" + request.getMaxDocs() + "]" + " and [" + maxDocs + "]" + ); } else { request.setMaxDocs(maxDocs); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java index c7951978786..512e7f430b7 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -51,15 +51,19 @@ import java.util.function.Consumer; * Rest handler for reindex actions that accepts a search request like Update-By-Query or Delete-By-Query */ public abstract class AbstractBulkByQueryRestHandler< - Request extends AbstractBulkByScrollRequest, - A extends ActionType> extends AbstractBaseReindexRestHandler { + Request extends AbstractBulkByScrollRequest, + A extends ActionType> extends AbstractBaseReindexRestHandler { protected AbstractBulkByQueryRestHandler(A action) { super(action); } - protected void parseInternalRequest(Request internal, RestRequest restRequest, NamedWriteableRegistry namedWriteableRegistry, - Map> bodyConsumers) throws IOException { + protected void parseInternalRequest( + Request internal, + RestRequest restRequest, + NamedWriteableRegistry namedWriteableRegistry, + Map> bodyConsumers + ) throws IOException { assert internal != null : "Request should not be null"; assert restRequest != null : "RestRequest should not be null"; @@ -67,7 +71,12 @@ public abstract class AbstractBulkByQueryRestHandler< try (XContentParser parser = extractRequestSpecificFields(restRequest, bodyConsumers)) { RestSearchAction.parseSearchRequest( - searchRequest, restRequest, parser, namedWriteableRegistry, size -> setMaxDocsFromSearchSize(internal, size)); + searchRequest, + restRequest, + parser, + namedWriteableRegistry, + size -> 
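For reference, the handlers above accept these knobs on the query string: slices takes a positive integer or the string "auto", requests_per_second takes a float greater than 0 or -1 to disable throttling (mapped to Float.POSITIVE_INFINITY), and max_docs must agree with any value given in the request body. A usage-oriented fragment in the style of the ManyDocumentsIT tests in this patch; the endpoint, the parameter values, and the addParameter calls are illustrative assumptions rather than something this patch adds:

// Illustrative fragment for a REST test such as ManyDocumentsIT, exercising the
// query-string parameters parsed by the handler above (values are examples only).
Request request = new Request("POST", "/test/_update_by_query");
request.addParameter("slices", "auto");              // a positive integer or "auto"
request.addParameter("requests_per_second", "-1");   // a float > 0, or -1 to disable throttling
request.addParameter("max_docs", "1000");            // must match any max_docs in the body
Map<String, Object> response = entityAsMap(client().performRequest(request));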
setMaxDocsFromSearchSize(internal, size) + ); } searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size())); @@ -90,13 +99,15 @@ public abstract class AbstractBulkByQueryRestHandler< * should get better when SearchRequest has full ObjectParser support * then we can delegate and stuff. */ - private XContentParser extractRequestSpecificFields(RestRequest restRequest, - Map> bodyConsumers) throws IOException { + private XContentParser extractRequestSpecificFields(RestRequest restRequest, Map> bodyConsumers) + throws IOException { if (restRequest.hasContentOrSourceParam() == false) { return null; // body is optional } - try (XContentParser parser = restRequest.contentOrSourceParamParser(); - XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType())) { + try ( + XContentParser parser = restRequest.contentOrSourceParamParser(); + XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()) + ) { Map body = parser.map(); for (Map.Entry> consumer : bodyConsumers.entrySet()) { @@ -105,8 +116,13 @@ public abstract class AbstractBulkByQueryRestHandler< consumer.getValue().accept(value); } } - return parser.contentType().xContent().createParser(parser.getXContentRegistry(), - parser.getDeprecationHandler(), BytesReference.bytes(builder.map(body)).streamInput()); + return parser.contentType() + .xContent() + .createParser( + parser.getXContentRegistry(), + parser.getDeprecationHandler(), + BytesReference.bytes(builder.map(body)).streamInput() + ); } } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java index d10b2a38f55..ac1a7c22a4d 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AsyncDeleteByQueryAction.java @@ -44,9 +44,15 @@ import org.opensearch.threadpool.ThreadPool; */ public class AsyncDeleteByQueryAction extends AbstractAsyncBulkByScrollAction { - public AsyncDeleteByQueryAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, DeleteByQueryRequest request, ScriptService scriptService, - ActionListener listener) { + public AsyncDeleteByQueryAction( + BulkByScrollTask task, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + DeleteByQueryRequest request, + ScriptService scriptService, + ActionListener listener + ) { super(task, false, true, logger, client, threadPool, request, listener, scriptService, null); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java index 5c14bd2872c..334390ca730 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelper.java @@ -80,7 +80,8 @@ class BulkByScrollParallelizationHelper { ActionListener listener, Client client, DiscoveryNode node, - Runnable workerAction) { + Runnable workerAction + ) { initTaskState(task, request, client, new ActionListener() { @Override public void onResponse(Void aVoid) { @@ -111,7 +112,8 @@ class BulkByScrollParallelizationHelper { ActionListener listener, Client client, DiscoveryNode node, - Runnable workerAction) { + 
Runnable workerAction + ) { if (task.isLeader()) { sendSubRequests(client, action, node.getId(), task, request, listener); } else if (task.isWorker()) { @@ -133,7 +135,8 @@ class BulkByScrollParallelizationHelper { BulkByScrollTask task, Request request, Client client, - ActionListener listener) { + ActionListener listener + ) { int configuredSlices = request.getSlices(); if (configuredSlices == AbstractBulkByScrollRequest.AUTO_SLICES) { ClusterSearchShardsRequest shardsRequest = new ClusterSearchShardsRequest(); @@ -159,7 +162,8 @@ class BulkByScrollParallelizationHelper { private static > void setWorkerCount( Request request, BulkByScrollTask task, - int slices) { + int slices + ) { if (slices > 1) { task.setWorkerCount(slices); } else { @@ -170,23 +174,21 @@ class BulkByScrollParallelizationHelper { } private static int countSlicesBasedOnShards(ClusterSearchShardsResponse response) { - Map countsByIndex = Arrays.stream(response.getGroups()).collect(Collectors.toMap( - group -> group.getShardId().getIndex(), - group -> 1, - (sum, term) -> sum + term - )); + Map countsByIndex = Arrays.stream(response.getGroups()) + .collect(Collectors.toMap(group -> group.getShardId().getIndex(), group -> 1, (sum, term) -> sum + term)); Set counts = new HashSet<>(countsByIndex.values()); int leastShards = counts.isEmpty() ? 1 : Collections.min(counts); return Math.min(leastShards, AUTO_SLICE_CEILING); } private static > void sendSubRequests( - Client client, - ActionType action, - String localNodeId, - BulkByScrollTask task, - Request request, - ActionListener listener) { + Client client, + ActionType action, + String localNodeId, + BulkByScrollTask task, + Request request, + ActionListener listener + ) { LeaderBulkByScrollTaskState worker = task.getLeaderState(); int totalSlices = worker.getSlices(); @@ -195,8 +197,9 @@ class BulkByScrollParallelizationHelper { // TODO move the request to the correct node. maybe here or somehow do it as part of startup for reindex in general.... 
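// Editor's illustrative aside (not part of the patch): countSlicesBasedOnShards in the hunk
// above counts shards per index with Collectors.toMap plus a merge function, then takes the
// smallest count, capped at AUTO_SLICE_CEILING. A minimal standalone sketch of that idiom
// using only JDK types; the list-of-index-names input and the ceiling value are assumptions
// for the sketch, not the module's actual ClusterSearchShardsResponse handling.
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

class SliceCountSketch {
    private static final int AUTO_SLICE_CEILING = 20; // stand-in ceiling for this sketch

    // Each entry stands in for one shard group: the index name appears once per shard.
    static int countSlices(List<String> shardIndexNames) {
        Map<String, Integer> countsByIndex = shardIndexNames.stream()
            .collect(Collectors.toMap(name -> name, name -> 1, (sum, term) -> sum + term));
        Set<Integer> counts = new HashSet<>(countsByIndex.values());
        int leastShards = counts.isEmpty() ? 1 : Collections.min(counts);
        return Math.min(leastShards, AUTO_SLICE_CEILING);
    }

    public static void main(String[] args) {
        // "a" has 3 shards, "b" has 2 -> automatic slicing follows the smaller count: 2
        System.out.println(countSlices(Arrays.asList("a", "a", "a", "b", "b")));
    }
}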
Request requestForSlice = request.forSlice(parentTaskId, slice, totalSlices); ActionListener sliceListener = ActionListener.wrap( - r -> worker.onSliceResponse(listener, slice.source().slice().getId(), r), - e -> worker.onSliceFailure(listener, slice.source().slice().getId(), e)); + r -> worker.onSliceResponse(listener, slice.source().slice().getId(), r), + e -> worker.onSliceFailure(listener, slice.source().slice().getId(), e) + ); client.execute(action, requestForSlice, sliceListener); } } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkIndexByScrollResponseContentListener.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkIndexByScrollResponseContentListener.java index 769c6d0e553..a41eb12cdd1 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkIndexByScrollResponseContentListener.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/BulkIndexByScrollResponseContentListener.java @@ -78,7 +78,7 @@ public class BulkIndexByScrollResponseContentListener extends RestBuilderListene status = failure.getStatus(); } } - for (SearchFailure failure: response.getSearchFailures()) { + for (SearchFailure failure : response.getSearchFailures()) { RestStatus failureStatus = failure.getStatus(); if (failureStatus.getStatus() > status.getStatus()) { status = failureStatus; diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java index 5530ce096f0..04619efb43c 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java @@ -79,36 +79,53 @@ public class ReindexPlugin extends Plugin implements ActionPlugin, ExtensiblePlu @Override public List> getActions() { - return Arrays.asList(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class), - new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class), - new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class), - new ActionHandler<>(RethrottleAction.INSTANCE, TransportRethrottleAction.class)); + return Arrays.asList( + new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class), + new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class), + new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class), + new ActionHandler<>(RethrottleAction.INSTANCE, TransportRethrottleAction.class) + ); } @Override public List getNamedWriteables() { return singletonList( - new NamedWriteableRegistry.Entry(Task.Status.class, BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new)); + new NamedWriteableRegistry.Entry(Task.Status.class, BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new) + ); } @Override - public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster) { + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { return Arrays.asList( - new RestReindexAction(), - new 
RestUpdateByQueryAction(), - new RestDeleteByQueryAction(), - new RestRethrottleAction(nodesInCluster)); + new RestReindexAction(), + new RestUpdateByQueryAction(), + new RestDeleteByQueryAction(), + new RestRethrottleAction(nodesInCluster) + ); } @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver expressionResolver, + Supplier repositoriesServiceSupplier + ) { return Collections.singletonList(new ReindexSslConfig(environment.settings(), environment, resourceWatcherService)); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java index 983937d5457..f48422d41ea 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java @@ -74,7 +74,9 @@ class ReindexSslConfig { static { Setting.Property[] defaultProperties = new Setting.Property[] { Setting.Property.NodeScope, Setting.Property.Filtered }; - Setting.Property[] deprecatedProperties = new Setting.Property[] { Setting.Property.Deprecated, Setting.Property.NodeScope, + Setting.Property[] deprecatedProperties = new Setting.Property[] { + Setting.Property.Deprecated, + Setting.Property.NodeScope, Setting.Property.Filtered }; for (String key : SslConfigurationKeys.getStringKeys()) { String settingName = "reindex.ssl." + key; diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java index d5771d5c7ba..06b84096513 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java @@ -56,16 +56,20 @@ import java.util.List; class ReindexValidator { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ReindexValidator.class); - static final String SORT_DEPRECATED_MESSAGE = "The sort option in reindex is deprecated. " + - "Instead consider using query filtering to find the desired subset of data."; + static final String SORT_DEPRECATED_MESSAGE = "The sort option in reindex is deprecated. 
" + + "Instead consider using query filtering to find the desired subset of data."; private final CharacterRunAutomaton remoteWhitelist; private final ClusterService clusterService; private final IndexNameExpressionResolver resolver; private final AutoCreateIndex autoCreateIndex; - ReindexValidator(Settings settings, ClusterService clusterService, IndexNameExpressionResolver resolver, - AutoCreateIndex autoCreateIndex) { + ReindexValidator( + Settings settings, + ClusterService clusterService, + IndexNameExpressionResolver resolver, + AutoCreateIndex autoCreateIndex + ) { this.remoteWhitelist = buildRemoteWhitelist(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings)); this.clusterService = clusterService; this.resolver = resolver; @@ -75,8 +79,14 @@ class ReindexValidator { void initialValidation(ReindexRequest request) { checkRemoteWhitelist(remoteWhitelist, request.getRemoteInfo()); ClusterState state = clusterService.state(); - validateAgainstAliases(request.getSearchRequest(), request.getDestination(), request.getRemoteInfo(), resolver, autoCreateIndex, - state); + validateAgainstAliases( + request.getSearchRequest(), + request.getDestination(), + request.getRemoteInfo(), + resolver, + autoCreateIndex, + state + ); SearchSourceBuilder searchSource = request.getSearchRequest().source(); if (searchSource != null && searchSource.sorts() != null && searchSource.sorts().isEmpty() == false) { deprecationLogger.deprecate("reindex_sort", SORT_DEPRECATED_MESSAGE); @@ -106,9 +116,13 @@ class ReindexValidator { Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES); if (Operations.isTotal(automaton)) { - throw new IllegalArgumentException("Refusing to start because whitelist " + whitelist + " accepts all addresses. " - + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " - + "for them."); + throw new IllegalArgumentException( + "Refusing to start because whitelist " + + whitelist + + " accepts all addresses. " + + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " + + "for them." + ); } return new CharacterRunAutomaton(automaton); } @@ -119,20 +133,23 @@ class ReindexValidator { * This cannot be done during request validation because the cluster state * isn't available then. Package private for testing. 
*/ - static void validateAgainstAliases(SearchRequest source, IndexRequest destination, RemoteInfo remoteInfo, - IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, - ClusterState clusterState) { + static void validateAgainstAliases( + SearchRequest source, + IndexRequest destination, + RemoteInfo remoteInfo, + IndexNameExpressionResolver indexNameExpressionResolver, + AutoCreateIndex autoCreateIndex, + ClusterState clusterState + ) { if (remoteInfo != null) { return; } String target = destination.index(); if (destination.isRequireAlias() && (false == clusterState.getMetadata().hasAlias(target))) { - throw new IndexNotFoundException("[" - + DocWriteRequest.REQUIRE_ALIAS - + "] request flag is [true] and [" - + target - + "] is not an alias", - target); + throw new IndexNotFoundException( + "[" + DocWriteRequest.REQUIRE_ALIAS + "] request flag is [true] and [" + target + "] is not an alias", + target + ); } if (false == autoCreateIndex.shouldAutoCreate(target, clusterState)) { /* diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java index 8e4b062285e..0037e1d06a1 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java @@ -96,13 +96,24 @@ public class Reindexer { private final ReindexSslConfig reindexSslConfig; private final Optional remoteExtension; - Reindexer(ClusterService clusterService, Client client, ThreadPool threadPool, ScriptService scriptService, - ReindexSslConfig reindexSslConfig) { + Reindexer( + ClusterService clusterService, + Client client, + ThreadPool threadPool, + ScriptService scriptService, + ReindexSslConfig reindexSslConfig + ) { this(clusterService, client, threadPool, scriptService, reindexSslConfig, Optional.empty()); } - Reindexer(ClusterService clusterService, Client client, ThreadPool threadPool, ScriptService scriptService, - ReindexSslConfig reindexSslConfig, Optional remoteExtension) { + Reindexer( + ClusterService clusterService, + Client client, + ThreadPool threadPool, + ScriptService scriptService, + ReindexSslConfig reindexSslConfig, + Optional remoteExtension + ) { this.clusterService = clusterService; this.client = client; this.threadPool = threadPool; @@ -117,14 +128,29 @@ public class Reindexer { public void execute(BulkByScrollTask task, ReindexRequest request, ActionListener listener) { ActionListener remoteReindexActionListener = getRemoteReindexWrapperListener(listener, request); - BulkByScrollParallelizationHelper.executeSlicedAction(task, request, ReindexAction.INSTANCE, listener, client, + BulkByScrollParallelizationHelper.executeSlicedAction( + task, + request, + ReindexAction.INSTANCE, + listener, + client, clusterService.localNode(), () -> { ParentTaskAssigningClient assigningClient = new ParentTaskAssigningClient(client, clusterService.localNode(), task); - AsyncIndexBySearchAction searchAction = new AsyncIndexBySearchAction(task, logger, assigningClient, threadPool, - scriptService, reindexSslConfig, request, remoteReindexActionListener, getInterceptor(request)); + AsyncIndexBySearchAction searchAction = new AsyncIndexBySearchAction( + task, + logger, + assigningClient, + threadPool, + scriptService, + reindexSslConfig, + request, + remoteReindexActionListener, + getInterceptor(request) + ); searchAction.start(); - }); + } + ); } @@ -132,13 +158,15 @@ public class Reindexer { if 
(request.getRemoteInfo() == null) { return Optional.empty(); } else { - return remoteExtension.map(x -> x.getInterceptorProvider()).flatMap(provider -> - provider.getRestInterceptor(request, threadPool.getThreadContext())); + return remoteExtension.map(x -> x.getInterceptorProvider()) + .flatMap(provider -> provider.getRestInterceptor(request, threadPool.getThreadContext())); } } private ActionListener getRemoteReindexWrapperListener( - ActionListener listener, ReindexRequest reindexRequest) { + ActionListener listener, + ReindexRequest reindexRequest + ) { if (reindexRequest.getRemoteInfo() == null) { return listener; } @@ -162,45 +190,47 @@ public class Reindexer { * @param threadCollector a list in which we collect all the threads created by the client * @param restInterceptor an optional HttpRequestInterceptor */ - static RestClient buildRestClient(RemoteInfo remoteInfo, ReindexSslConfig sslConfig, long taskId, List threadCollector, - Optional restInterceptor) { + static RestClient buildRestClient( + RemoteInfo remoteInfo, + ReindexSslConfig sslConfig, + long taskId, + List threadCollector, + Optional restInterceptor + ) { Header[] clientHeaders = new Header[remoteInfo.getHeaders().size()]; int i = 0; for (Map.Entry header : remoteInfo.getHeaders().entrySet()) { clientHeaders[i++] = new BasicHeader(header.getKey(), header.getValue()); } - final RestClientBuilder builder = - RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) - .setDefaultHeaders(clientHeaders) - .setRequestConfigCallback(c -> { - c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); - c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); - return c; - }) - .setHttpClientConfigCallback(c -> { - // Enable basic auth if it is configured - if (remoteInfo.getUsername() != null) { - UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), - remoteInfo.getPassword()); - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, creds); - c.setDefaultCredentialsProvider(credentialsProvider); - } else { - restInterceptor.ifPresent(interceptor -> c.addInterceptorLast(interceptor)); - } - // Stick the task id in the thread name so we can track down tasks from stack traces - AtomicInteger threads = new AtomicInteger(); - c.setThreadFactory(r -> { - String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); - Thread t = new Thread(r, name); - threadCollector.add(t); - return t; - }); - // Limit ourselves to one reactor thread because for now the search process is single threaded. 
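// Editor's illustrative aside (not part of the patch): buildRestClient in this hunk wires up
// the low-level REST client with default headers, timeouts, optional basic auth and a custom
// thread factory. A minimal, hedged example of the same builder callbacks; the host, port,
// credentials, timeout values and thread name below are made up for the sketch, and it assumes
// the opensearch-rest-client and Apache HttpClient dependencies are on the classpath.
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;

class RemoteClientSketch {
    static RestClient buildExampleClient() {
        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("user", "password"));
        return RestClient.builder(new HttpHost("remote-host", 9200, "http"))
            // connect/socket timeouts, mirroring the RequestConfigCallback in the hunk above
            .setRequestConfigCallback(c -> c.setConnectTimeout(30_000).setSocketTimeout(30_000))
            // basic auth plus a named thread factory, mirroring the HttpClientConfigCallback
            .setHttpClientConfigCallback(c -> c
                .setDefaultCredentialsProvider(credentialsProvider)
                .setThreadFactory(r -> new Thread(r, "reindex-remote-client")))
            .build();
    }
}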
- c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); - c.setSSLStrategy(sslConfig.getStrategy()); - return c; - }); + final RestClientBuilder builder = RestClient.builder( + new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme()) + ).setDefaultHeaders(clientHeaders).setRequestConfigCallback(c -> { + c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); + c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); + return c; + }).setHttpClientConfigCallback(c -> { + // Enable basic auth if it is configured + if (remoteInfo.getUsername() != null) { + UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), remoteInfo.getPassword()); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, creds); + c.setDefaultCredentialsProvider(credentialsProvider); + } else { + restInterceptor.ifPresent(interceptor -> c.addInterceptorLast(interceptor)); + } + // Stick the task id in the thread name so we can track down tasks from stack traces + AtomicInteger threads = new AtomicInteger(); + c.setThreadFactory(r -> { + String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); + Thread t = new Thread(r, name); + threadCollector.add(t); + return t; + }); + // Limit ourselves to one reactor thread because for now the search process is single threaded. + c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); + c.setSSLStrategy(sslConfig.getStrategy()); + return c; + }); if (Strings.hasLength(remoteInfo.getPathPrefix()) && "/".equals(remoteInfo.getPathPrefix()) == false) { builder.setPathPrefix(remoteInfo.getPathPrefix()); } @@ -223,22 +253,47 @@ public class Reindexer { */ private List createdThreads = emptyList(); - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, ScriptService scriptService, ReindexSslConfig sslConfig, ReindexRequest request, - ActionListener listener) { + AsyncIndexBySearchAction( + BulkByScrollTask task, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + ScriptService scriptService, + ReindexSslConfig sslConfig, + ReindexRequest request, + ActionListener listener + ) { this(task, logger, client, threadPool, scriptService, sslConfig, request, listener, Optional.empty()); } - AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, ScriptService scriptService, ReindexSslConfig sslConfig, ReindexRequest request, - ActionListener listener, Optional interceptor) { - super(task, + AsyncIndexBySearchAction( + BulkByScrollTask task, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + ScriptService scriptService, + ReindexSslConfig sslConfig, + ReindexRequest request, + ActionListener listener, + Optional interceptor + ) { + super( + task, /* * We only need the source version if we're going to use it when write and we only do that when the destination request uses * external versioning. 
*/ request.getDestination().versionType() != VersionType.INTERNAL, - false, logger, client, threadPool, request, listener, scriptService, sslConfig, interceptor); + false, + logger, + client, + threadPool, + request, + listener, + scriptService, + sslConfig, + interceptor + ); } @Override @@ -247,23 +302,34 @@ public class Reindexer { RemoteInfo remoteInfo = mainRequest.getRemoteInfo(); createdThreads = synchronizedList(new ArrayList<>()); assert sslConfig != null : "Reindex ssl config must be set"; - RestClient restClient = buildRestClient(remoteInfo, sslConfig, task.getId(), createdThreads, - this.interceptor); - return new RemoteScrollableHitSource(logger, backoffPolicy, threadPool, worker::countSearchRetry, - this::onScrollResponse, this::finishHim, - restClient, remoteInfo.getQuery(), mainRequest.getSearchRequest()); + RestClient restClient = buildRestClient(remoteInfo, sslConfig, task.getId(), createdThreads, this.interceptor); + return new RemoteScrollableHitSource( + logger, + backoffPolicy, + threadPool, + worker::countSearchRetry, + this::onScrollResponse, + this::finishHim, + restClient, + remoteInfo.getQuery(), + mainRequest.getSearchRequest() + ); } return super.buildScrollableResultSource(backoffPolicy); } @Override - protected void finishHim(Exception failure, List indexingFailures, - List searchFailures, boolean timedOut) { + protected void finishHim( + Exception failure, + List indexingFailures, + List searchFailures, + boolean timedOut + ) { super.finishHim(failure, indexingFailures, searchFailures, timedOut); // A little extra paranoia so we log something if we leave any threads running for (Thread thread : createdThreads) { if (thread.isAlive()) { - assert false: "Failed to properly stop client thread [" + thread.getName() + "]"; + assert false : "Failed to properly stop client thread [" + thread.getName() + "]"; logger.error("Failed to properly stop client thread [{}]", thread.getName()); } } @@ -313,16 +379,20 @@ public class Reindexer { final XContentType mainRequestXContentType = mainRequest.getDestination().getContentType(); if (mainRequestXContentType != null && doc.getXContentType() != mainRequestXContentType) { // we need to convert - try (InputStream stream = doc.getSource().streamInput(); - XContentParser parser = sourceXContentType.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream); - XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) { + try ( + InputStream stream = doc.getSource().streamInput(); + XContentParser parser = sourceXContentType.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream); + XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent()) + ) { parser.nextToken(); builder.copyCurrentStructure(parser); index.source(BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { - throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to " - + mainRequestXContentType, e); + throw new UncheckedIOException( + "failed to convert hit from " + sourceXContentType + " to " + mainRequestXContentType, + e + ); } } else { index.source(doc.getSource(), doc.getXContentType()); @@ -369,8 +439,12 @@ public class Reindexer { class ReindexScriptApplier extends ScriptApplier { - ReindexScriptApplier(WorkerBulkByScrollTaskState taskWorker, ScriptService scriptService, Script script, - Map params) { + ReindexScriptApplier( + 
WorkerBulkByScrollTaskState taskWorker, + ScriptService scriptService, + Script script, + Map params + ) { super(taskWorker, scriptService, script, params); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java index 9041014c33d..aea72e694a6 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestDeleteByQueryAction.java @@ -54,9 +54,7 @@ public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler routes() { - return unmodifiableList(asList( - new Route(POST, "/{index}/_delete_by_query"), - new Route(POST, "/{index}/{type}/_delete_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_delete_by_query"), new Route(POST, "/{index}/{type}/_delete_by_query"))); } @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestReindexAction.java index 9a9cd555c31..9fb30876c3e 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestReindexAction.java @@ -75,8 +75,9 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler routes() { - return unmodifiableList(asList( - new Route(POST, "/_update_by_query/{taskId}/_rethrottle"), - new Route(POST, "/_delete_by_query/{taskId}/_rethrottle"), - new Route(POST, "/_reindex/{taskId}/_rethrottle"))); + return unmodifiableList( + asList( + new Route(POST, "/_update_by_query/{taskId}/_rethrottle"), + new Route(POST, "/_delete_by_query/{taskId}/_rethrottle"), + new Route(POST, "/_reindex/{taskId}/_rethrottle") + ) + ); } @Override @@ -76,7 +79,10 @@ public class RestRethrottleAction extends BaseRestHandler { } internalRequest.setRequestsPerSecond(requestsPerSecond); final String groupBy = request.param("group_by", "nodes"); - return channel -> - client.execute(RethrottleAction.INSTANCE, internalRequest, listTasksResponseListener(nodesInCluster, groupBy, channel)); + return channel -> client.execute( + RethrottleAction.INSTANCE, + internalRequest, + listTasksResponseListener(nodesInCluster, groupBy, channel) + ); } } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java index dc201dbea59..d38cb47fc83 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RestUpdateByQueryAction.java @@ -55,9 +55,7 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler routes() { - return unmodifiableList(asList( - new Route(POST, "/{index}/_update_by_query"), - new Route(POST, "/{index}/{type}/_update_by_query"))); + return unmodifiableList(asList(new Route(POST, "/{index}/_update_by_query"), new Route(POST, "/{index}/{type}/_update_by_query"))); } @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequest.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequest.java index ff8d436f7d3..f174aa6d6fd 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequest.java +++ 
b/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequest.java @@ -52,8 +52,7 @@ public class RethrottleRequest extends BaseTasksRequest { */ private Float requestsPerSecond; - public RethrottleRequest() { - } + public RethrottleRequest() {} public RethrottleRequest(StreamInput in) throws IOException { super(in); @@ -81,7 +80,8 @@ public class RethrottleRequest extends BaseTasksRequest { public RethrottleRequest setRequestsPerSecond(float requestsPerSecond) { if (requestsPerSecond <= 0) { throw new IllegalArgumentException( - "[requests_per_second] must be greater than 0. Use Float.POSITIVE_INFINITY to disable throttling."); + "[requests_per_second] must be greater than 0. Use Float.POSITIVE_INFINITY to disable throttling." + ); } this.requestsPerSecond = requestsPerSecond; return this; @@ -95,13 +95,15 @@ public class RethrottleRequest extends BaseTasksRequest { } for (String action : getActions()) { switch (action) { - case ReindexAction.NAME: - case UpdateByQueryAction.NAME: - case DeleteByQueryAction.NAME: - continue; - default: - validationException = addValidationError( - "Can only change the throttling on reindex or update-by-query. Not on [" + action + "]", validationException); + case ReindexAction.NAME: + case UpdateByQueryAction.NAME: + case DeleteByQueryAction.NAME: + continue; + default: + validationException = addValidationError( + "Can only change the throttling on reindex or update-by-query. Not on [" + action + "]", + validationException + ); } } return validationException; diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequestBuilder.java index 8b9e33ef6f8..e83a37bbb17 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequestBuilder.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/RethrottleRequestBuilder.java @@ -41,8 +41,7 @@ import org.opensearch.client.OpenSearchClient; * Java API support for changing the throttle on reindex tasks while they are running. 
*/ public class RethrottleRequestBuilder extends TasksRequestBuilder { - public RethrottleRequestBuilder(OpenSearchClient client, - ActionType action) { + public RethrottleRequestBuilder(OpenSearchClient client, ActionType action) { super(client, action, new RethrottleRequest()); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportDeleteByQueryAction.java index 520376d9f9c..51e388227eb 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportDeleteByQueryAction.java @@ -45,7 +45,6 @@ import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; - public class TransportDeleteByQueryAction extends HandledTransportAction { private final ThreadPool threadPool; @@ -54,10 +53,20 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + public TransportDeleteByQueryAction( + ThreadPool threadPool, + ActionFilters actionFilters, + Client client, + TransportService transportService, + ScriptService scriptService, + ClusterService clusterService + ) { + super( + DeleteByQueryAction.NAME, + transportService, + actionFilters, + (Writeable.Reader) DeleteByQueryRequest::new + ); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; @@ -67,13 +76,21 @@ public class TransportDeleteByQueryAction extends HandledTransportAction listener) { BulkByScrollTask bulkByScrollTask = (BulkByScrollTask) task; - BulkByScrollParallelizationHelper.startSlicedAction(request, bulkByScrollTask, DeleteByQueryAction.INSTANCE, listener, client, + BulkByScrollParallelizationHelper.startSlicedAction( + request, + bulkByScrollTask, + DeleteByQueryAction.INSTANCE, + listener, + client, clusterService.localNode(), () -> { - ParentTaskAssigningClient assigningClient = new ParentTaskAssigningClient(client, clusterService.localNode(), - bulkByScrollTask); - new AsyncDeleteByQueryAction(bulkByScrollTask, logger, assigningClient, threadPool, request, scriptService, - listener).start(); + ParentTaskAssigningClient assigningClient = new ParentTaskAssigningClient( + client, + clusterService.localNode(), + bulkByScrollTask + ); + new AsyncDeleteByQueryAction(bulkByScrollTask, logger, assigningClient, threadPool, request, scriptService, listener) + .start(); } ); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java index fc0c6e30097..a24c2b002b7 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java @@ -56,17 +56,30 @@ import java.util.function.Function; import static java.util.Collections.emptyList; public class TransportReindexAction extends HandledTransportAction { - public static final Setting> REMOTE_CLUSTER_WHITELIST = - Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + public static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( + "reindex.remote.whitelist", + emptyList(), + Function.identity(), + Property.NodeScope + ); public static Optional remoteExtension = Optional.empty(); private final 
ReindexValidator reindexValidator; private final Reindexer reindexer; @Inject - public TransportReindexAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, ScriptService scriptService, - AutoCreateIndex autoCreateIndex, Client client, TransportService transportService, ReindexSslConfig sslConfig) { + public TransportReindexAction( + Settings settings, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, + ScriptService scriptService, + AutoCreateIndex autoCreateIndex, + Client client, + TransportService transportService, + ReindexSslConfig sslConfig + ) { super(ReindexAction.NAME, transportService, actionFilters, ReindexRequest::new); this.reindexValidator = new ReindexValidator(settings, clusterService, indexNameExpressionResolver, autoCreateIndex); this.reindexer = new Reindexer(clusterService, client, threadPool, scriptService, sslConfig, remoteExtension); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportRethrottleAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportRethrottleAction.java index 2ec971e82ae..2ee869ce2b4 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportRethrottleAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportRethrottleAction.java @@ -53,10 +53,22 @@ public class TransportRethrottleAction extends TransportTasksAction listener) { + static void rethrottle( + Logger logger, + String localNodeId, + Client client, + BulkByScrollTask task, + float newRequestsPerSecond, + ActionListener listener + ) { if (task.isWorker()) { rethrottleChildTask(logger, localNodeId, task, newRequestsPerSecond, listener); @@ -78,12 +96,19 @@ public class TransportRethrottleAction extends TransportTasksAction listener) { + private static void rethrottleParentTask( + Logger logger, + String localNodeId, + Client client, + BulkByScrollTask task, + float newRequestsPerSecond, + ActionListener listener + ) { final LeaderBulkByScrollTaskState leaderState = task.getLeaderState(); final int runningSubtasks = leaderState.runningSliceSubTasks(); @@ -91,30 +116,36 @@ public class TransportRethrottleAction extends TransportTasksAction { - r.rethrowFailures("Rethrottle"); - listener.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); - }, - listener::onFailure)); + logger.debug("rethrottling children of task [{}] to [{}] requests per second", task.getId(), subRequest.getRequestsPerSecond()); + client.execute(RethrottleAction.INSTANCE, subRequest, ActionListener.wrap(r -> { + r.rethrowFailures("Rethrottle"); + listener.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); + }, listener::onFailure)); } else { logger.debug("children of task [{}] are already finished, nothing to rethrottle", task.getId()); listener.onResponse(task.taskInfo(localNodeId, true)); } } - private static void rethrottleChildTask(Logger logger, String localNodeId, BulkByScrollTask task, float newRequestsPerSecond, - ActionListener listener) { + private static void rethrottleChildTask( + Logger logger, + String localNodeId, + BulkByScrollTask task, + float newRequestsPerSecond, + ActionListener listener + ) { logger.debug("rethrottling local task [{}] to [{}] requests per second", task.getId(), newRequestsPerSecond); task.getWorkerState().rethrottle(newRequestsPerSecond); 
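// Editor's illustrative aside (not part of the patch): rethrottleParentTask above fans the
// rethrottle request out to slice subtasks and adapts the result with ActionListener.wrap,
// i.e. one lambda for the success path and one for failures. A minimal stand-in for that
// pattern using only the JDK; the Listener interface below is a simplification for the
// sketch, not the OpenSearch ActionListener API.
import java.util.function.Consumer;

class ListenerWrapSketch {
    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    // Mirrors the shape of ActionListener.wrap(onResponse, onFailure): build a listener from two lambdas.
    static <T> Listener<T> wrap(Consumer<T> onResponse, Consumer<Exception> onFailure) {
        return new Listener<T>() {
            @Override
            public void onResponse(T response) {
                try {
                    onResponse.accept(response);
                } catch (Exception e) {
                    onFailure.accept(e); // a failing success handler is routed to the failure path
                }
            }

            @Override
            public void onFailure(Exception e) {
                onFailure.accept(e);
            }
        };
    }

    public static void main(String[] args) {
        Listener<String> listener = wrap(
            r -> System.out.println("rethrottled: " + r),
            e -> System.err.println("rethrottle failed: " + e.getMessage())
        );
        listener.onResponse("42 requests per second");
    }
}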
listener.onResponse(task.taskInfo(localNodeId, true)); } @Override - protected ListTasksResponse newResponse(RethrottleRequest request, List tasks, - List taskOperationFailures, List failedNodeExceptions) { + protected ListTasksResponse newResponse( + RethrottleRequest request, + List tasks, + List taskOperationFailures, + List failedNodeExceptions + ) { return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java index 2c5f6bf6a37..382a732bc06 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportUpdateByQueryAction.java @@ -65,10 +65,20 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + public TransportUpdateByQueryAction( + ThreadPool threadPool, + ActionFilters actionFilters, + Client client, + TransportService transportService, + ScriptService scriptService, + ClusterService clusterService + ) { + super( + UpdateByQueryAction.NAME, + transportService, + actionFilters, + (Writeable.Reader) UpdateByQueryRequest::new + ); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; @@ -78,14 +88,22 @@ public class TransportUpdateByQueryAction extends HandledTransportAction listener) { BulkByScrollTask bulkByScrollTask = (BulkByScrollTask) task; - BulkByScrollParallelizationHelper.startSlicedAction(request, bulkByScrollTask, UpdateByQueryAction.INSTANCE, listener, client, + BulkByScrollParallelizationHelper.startSlicedAction( + request, + bulkByScrollTask, + UpdateByQueryAction.INSTANCE, + listener, + client, clusterService.localNode(), () -> { ClusterState state = clusterService.state(); - ParentTaskAssigningClient assigningClient = new ParentTaskAssigningClient(client, clusterService.localNode(), - bulkByScrollTask); - new AsyncIndexBySearchAction(bulkByScrollTask, logger, assigningClient, threadPool, scriptService, request, state, - listener).start(); + ParentTaskAssigningClient assigningClient = new ParentTaskAssigningClient( + client, + clusterService.localNode(), + bulkByScrollTask + ); + new AsyncIndexBySearchAction(bulkByScrollTask, logger, assigningClient, threadPool, scriptService, request, state, listener) + .start(); } ); } @@ -97,15 +115,30 @@ public class TransportUpdateByQueryAction extends HandledTransportAction listener) { - super(task, + AsyncIndexBySearchAction( + BulkByScrollTask task, + Logger logger, + ParentTaskAssigningClient client, + ThreadPool threadPool, + ScriptService scriptService, + UpdateByQueryRequest request, + ClusterState clusterState, + ActionListener listener + ) { + super( + task, // not all nodes support sequence number powered optimistic concurrency control, we fall back to version clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0) == false, // all nodes support sequence number powered optimistic concurrency control and we can use it clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0), - logger, client, threadPool, request, listener, scriptService, null); + logger, + client, + threadPool, + request, + listener, + scriptService, + null + ); useSeqNoForCAS = clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0); } @@ -133,8 +166,12 @@ public class 
TransportUpdateByQueryAction extends HandledTransportAction params) { + UpdateByQueryScriptApplier( + WorkerBulkByScrollTaskState taskWorker, + ScriptService scriptService, + Script script, + Map params + ) { super(taskWorker, scriptService, script, params); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java index e03afe11f09..69ec2e8b852 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java @@ -67,11 +67,11 @@ import static org.opensearch.common.unit.TimeValue.timeValueMillis; * because the version constants have been removed. */ final class RemoteRequestBuilders { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RemoteRequestBuilders.class); + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RemoteRequestBuilders.class); static final String DEPRECATED_URL_ENCODED_INDEX_WARNING = - "Specifying index name using URL escaped index names for reindex from remote is deprecated. " + - "Instead specify index name without URL escaping."; + "Specifying index name using URL escaped index names for reindex from remote is deprecated. " + + "Instead specify index name without URL escaping."; private RemoteRequestBuilders() {} @@ -152,12 +152,14 @@ final class RemoteRequestBuilders { } // EMPTY is safe here because we're not calling namedObject - try (XContentBuilder entity = JsonXContent.contentBuilder(); - XContentParser queryParser = XContentHelper - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, query)) { + try ( + XContentBuilder entity = JsonXContent.contentBuilder(); + XContentParser queryParser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, query) + ) { entity.startObject(); - entity.field("query"); { + entity.field("query"); + { /* We're intentionally a bit paranoid here - copying the query * as xcontent rather than writing a raw field. We don't want * poorly written queries to escape. Ever. */ @@ -165,7 +167,8 @@ final class RemoteRequestBuilders { XContentParser.Token shouldBeEof = queryParser.nextToken(); if (shouldBeEof != null) { throw new OpenSearchException( - "query was more than a single object. This first token after the object is [" + shouldBeEof + "]"); + "query was more than a single object. 
This first token after the object is [" + shouldBeEof + "]" + ); } } @@ -253,9 +256,7 @@ final class RemoteRequestBuilders { } try (XContentBuilder entity = JsonXContent.contentBuilder()) { - entity.startObject() - .field("scroll_id", scroll) - .endObject(); + entity.startObject().field("scroll_id", scroll).endObject(); request.setJsonEntity(Strings.toString(entity)); } catch (IOException e) { throw new OpenSearchException("failed to build scroll entity", e); @@ -272,9 +273,7 @@ final class RemoteRequestBuilders { return request; } try (XContentBuilder entity = JsonXContent.contentBuilder()) { - entity.startObject() - .array("scroll_id", scroll) - .endObject(); + entity.startObject().array("scroll_id", scroll).endObject(); request.setJsonEntity(Strings.toString(entity)); } catch (IOException e) { throw new OpenSearchException("failed to build clear scroll entity", e); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java index b899484ee9a..4c57872462f 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteResponseParsers.java @@ -72,15 +72,14 @@ final class RemoteResponseParsers { /** * Parser for an individual {@code hit} element. */ - public static final ConstructingObjectParser HIT_PARSER = - new ConstructingObjectParser<>("hit", true, a -> { - int i = 0; - String index = (String) a[i++]; - String type = (String) a[i++]; - String id = (String) a[i++]; - Long version = (Long) a[i++]; - return new BasicHit(index, type, id, version == null ? -1 : version); - }); + public static final ConstructingObjectParser HIT_PARSER = new ConstructingObjectParser<>("hit", true, a -> { + int i = 0; + String index = (String) a[i++]; + String type = (String) a[i++]; + String id = (String) a[i++]; + Long version = (Long) a[i++]; + return new BasicHit(index, type, id, version == null ? -1 : version); + }); static { HIT_PARSER.declareString(constructorArg(), new ParseField("_index")); HIT_PARSER.declareString(constructorArg(), new ParseField("_type")); @@ -110,9 +109,7 @@ final class RemoteResponseParsers { String routing; } ObjectParser fieldsParser = new ObjectParser<>("fields", Fields::new); - HIT_PARSER.declareObject((hit, fields) -> { - hit.setRouting(fields.routing); - }, fieldsParser, new ParseField("fields")); + HIT_PARSER.declareObject((hit, fields) -> { hit.setRouting(fields.routing); }, fieldsParser, new ParseField("fields")); fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField); fieldsParser.declareLong((fields, ttl) -> {}, ttlField); // ignore ttls since they have been removed fieldsParser.declareString((fields, parent) -> {}, parentField); // ignore parents since they have been removed @@ -121,8 +118,7 @@ final class RemoteResponseParsers { /** * Parser for the {@code hits} element. Parsed to an array of {@code [total (Long), hits (List)]}. 
*/ - public static final ConstructingObjectParser HITS_PARSER = - new ConstructingObjectParser<>("hits", true, a -> a); + public static final ConstructingObjectParser HITS_PARSER = new ConstructingObjectParser<>("hits", true, a -> a); static { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { @@ -140,22 +136,25 @@ final class RemoteResponseParsers { /** * Parser for {@code failed} shards in the {@code _shards} elements. */ - public static final ConstructingObjectParser SEARCH_FAILURE_PARSER = - new ConstructingObjectParser<>("failure", true, a -> { - int i = 0; - String index = (String) a[i++]; - Integer shardId = (Integer) a[i++]; - String nodeId = (String) a[i++]; - Object reason = a[i++]; + public static final ConstructingObjectParser SEARCH_FAILURE_PARSER = new ConstructingObjectParser<>( + "failure", + true, + a -> { + int i = 0; + String index = (String) a[i++]; + Integer shardId = (Integer) a[i++]; + String nodeId = (String) a[i++]; + Object reason = a[i++]; - Throwable reasonThrowable; - if (reason instanceof String) { - reasonThrowable = new RuntimeException("Unknown remote exception with reason=[" + (String) reason + "]"); - } else { - reasonThrowable = (Throwable) reason; - } - return new SearchFailure(reasonThrowable, index, shardId, nodeId); - }); + Throwable reasonThrowable; + if (reason instanceof String) { + reasonThrowable = new RuntimeException("Unknown remote exception with reason=[" + (String) reason + "]"); + } else { + reasonThrowable = (Throwable) reason; + } + return new SearchFailure(reasonThrowable, index, shardId, nodeId); + } + ); static { SEARCH_FAILURE_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("index")); SEARCH_FAILURE_PARSER.declareInt(optionalConstructorArg(), new ParseField("shard")); @@ -173,44 +172,50 @@ final class RemoteResponseParsers { * Parser for the {@code _shards} element. Throws everything out except the errors array if there is one. If there isn't one then it * parses to an empty list. */ - public static final ConstructingObjectParser, Void> SHARDS_PARSER = - new ConstructingObjectParser<>("_shards", true, a -> { - @SuppressWarnings("unchecked") - List failures = (List) a[0]; - failures = failures == null ? emptyList() : failures; - return failures; - }); + public static final ConstructingObjectParser, Void> SHARDS_PARSER = new ConstructingObjectParser<>( + "_shards", + true, + a -> { + @SuppressWarnings("unchecked") + List failures = (List) a[0]; + failures = failures == null ? 
emptyList() : failures; + return failures; + } + ); static { SHARDS_PARSER.declareObjectArray(optionalConstructorArg(), SEARCH_FAILURE_PARSER, new ParseField("failures")); } - public static final ConstructingObjectParser RESPONSE_PARSER = - new ConstructingObjectParser<>("search_response", true, a -> { - int i = 0; - Throwable catastrophicFailure = (Throwable) a[i++]; - if (catastrophicFailure != null) { - return new Response(false, singletonList(new SearchFailure(catastrophicFailure)), 0, emptyList(), null); - } - boolean timedOut = (boolean) a[i++]; - String scroll = (String) a[i++]; - Object[] hitsElement = (Object[]) a[i++]; + public static final ConstructingObjectParser RESPONSE_PARSER = new ConstructingObjectParser<>( + "search_response", + true, + a -> { + int i = 0; + Throwable catastrophicFailure = (Throwable) a[i++]; + if (catastrophicFailure != null) { + return new Response(false, singletonList(new SearchFailure(catastrophicFailure)), 0, emptyList(), null); + } + boolean timedOut = (boolean) a[i++]; + String scroll = (String) a[i++]; + Object[] hitsElement = (Object[]) a[i++]; + @SuppressWarnings("unchecked") + List failures = (List) a[i++]; + + long totalHits = 0; + List hits = emptyList(); + + // Pull apart the hits element if we got it + if (hitsElement != null) { + i = 0; + totalHits = (long) hitsElement[i++]; @SuppressWarnings("unchecked") - List failures = (List) a[i++]; + List h = (List) hitsElement[i++]; + hits = h; + } - long totalHits = 0; - List hits = emptyList(); - - // Pull apart the hits element if we got it - if (hitsElement != null) { - i = 0; - totalHits = (long) hitsElement[i++]; - @SuppressWarnings("unchecked") - List h = (List) hitsElement[i++]; - hits = h; - } - - return new Response(timedOut, failures, totalHits, hits, scroll); - }); + return new Response(timedOut, failures, totalHits, hits, scroll); + } + ); static { RESPONSE_PARSER.declareObject(optionalConstructorArg(), (p, c) -> ThrowableBuilder.PARSER.apply(p, null), new ParseField("error")); RESPONSE_PARSER.declareBoolean(optionalConstructorArg(), new ParseField("timed_out")); @@ -254,33 +259,37 @@ final class RemoteResponseParsers { requireNonNull(type, "[type] is required"); requireNonNull(reason, "[reason] is required"); switch (type) { - // Make some effort to use the right exceptions - case "rejected_execution_exception": - return new OpenSearchRejectedExecutionException(reason); - case "parsing_exception": - XContentLocation location = null; - if (line != null && column != null) { - location = new XContentLocation(line, column); - } - return new ParsingException(location, reason); - // But it isn't worth trying to get it perfect.... - default: - return new RuntimeException(type + ": " + reason); + // Make some effort to use the right exceptions + case "rejected_execution_exception": + return new OpenSearchRejectedExecutionException(reason); + case "parsing_exception": + XContentLocation location = null; + if (line != null && column != null) { + location = new XContentLocation(line, column); + } + return new ParsingException(location, reason); + // But it isn't worth trying to get it perfect.... 
+ default: + return new RuntimeException(type + ": " + reason); } } public void setType(String type) { this.type = type; } + public void setReason(String reason) { this.reason = reason; } + public void setLine(Integer line) { this.line = line; } + public void setColumn(Integer column) { this.column = column; } + public void setCausedBy(Throwable causedBy) { this.causedBy = causedBy; } @@ -290,19 +299,18 @@ final class RemoteResponseParsers { * Parses the main action to return just the {@linkplain Version} that it returns. We throw everything else out. */ public static final ConstructingObjectParser MAIN_ACTION_PARSER = new ConstructingObjectParser<>( - "/", true, a -> (Version) a[0]); + "/", + true, + a -> (Version) a[0] + ); static { ConstructingObjectParser versionParser = new ConstructingObjectParser<>( - "version", true, a -> a[0] == null ? - LegacyESVersion.fromString( - ((String) a[1]) - .replace("-SNAPSHOT", "") - .replaceFirst("-(alpha\\d+|beta\\d+|rc\\d+)", "")) : - Version.fromString( - ((String) a[1]) - .replace("-SNAPSHOT", "") - .replaceFirst("-(alpha\\d+|beta\\d+|rc\\d+)", "")) - ); + "version", + true, + a -> a[0] == null + ? LegacyESVersion.fromString(((String) a[1]).replace("-SNAPSHOT", "").replaceFirst("-(alpha\\d+|beta\\d+|rc\\d+)", "")) + : Version.fromString(((String) a[1]).replace("-SNAPSHOT", "").replaceFirst("-(alpha\\d+|beta\\d+|rc\\d+)", "")) + ); versionParser.declareStringOrNull(optionalConstructorArg(), new ParseField("distribution")); versionParser.declareString(constructorArg(), new ParseField("number")); MAIN_ACTION_PARSER.declareObject(constructorArg(), versionParser, new ParseField("version")); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java index 369b5fe2ef5..be691243ecf 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -79,9 +79,17 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { private final SearchRequest searchRequest; Version remoteVersion; - public RemoteScrollableHitSource(Logger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, - Consumer onResponse, Consumer fail, - RestClient client, BytesReference query, SearchRequest searchRequest) { + public RemoteScrollableHitSource( + Logger logger, + BackoffPolicy backoffPolicy, + ThreadPool threadPool, + Runnable countSearchRetry, + Consumer onResponse, + Consumer fail, + RestClient client, + BytesReference query, + SearchRequest searchRequest + ) { super(logger, backoffPolicy, threadPool, countSearchRetry, onResponse, fail); this.query = query; this.searchRequest = searchRequest; @@ -92,8 +100,11 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { protected void doStart(RejectAwareActionListener searchListener) { lookupRemoteVersion(RejectAwareActionListener.withResponseHandler(searchListener, version -> { remoteVersion = version; - execute(RemoteRequestBuilders.initialSearch(searchRequest, query, remoteVersion), - RESPONSE_PARSER, RejectAwareActionListener.withResponseHandler(searchListener, r -> onStartResponse(searchListener, r))); + execute( + RemoteRequestBuilders.initialSearch(searchRequest, query, remoteVersion), + RESPONSE_PARSER, + RejectAwareActionListener.withResponseHandler(searchListener, r 
-> onStartResponse(searchListener, r)) + ); })); } @@ -134,11 +145,15 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { private void logFailure(Exception e) { if (e instanceof ResponseException) { ResponseException re = (ResponseException) e; - if (remoteVersion.before(Version.fromId(2000099)) - && re.getResponse().getStatusLine().getStatusCode() == 404) { - logger.debug((Supplier) () -> new ParameterizedMessage( + if (remoteVersion.before(Version.fromId(2000099)) && re.getResponse().getStatusLine().getStatusCode() == 404) { + logger.debug( + (Supplier) () -> new ParameterizedMessage( "Failed to clear scroll [{}] from pre-2.0 OpenSearch. This is normal if the request terminated " - + "normally as the scroll has already been cleared automatically.", scrollId), e); + + "normally as the scroll has already been cleared automatically.", + scrollId + ), + e + ); return; } } @@ -164,8 +179,11 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { }); } - private void execute(Request request, - BiFunction parser, RejectAwareActionListener listener) { + private void execute( + Request request, + BiFunction parser, + RejectAwareActionListener listener + ) { // Preserve the thread context so headers survive after the call java.util.function.Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(true); try { @@ -188,7 +206,8 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { try { logger.debug("Response didn't include Content-Type: " + bodyMessage(response.getEntity())); throw new OpenSearchException( - "Response didn't include supported Content-Type, remote is likely not an OpenSearch instance"); + "Response didn't include supported Content-Type, remote is likely not an OpenSearch instance" + ); } catch (IOException e) { OpenSearchException ee = new OpenSearchException("Error extracting body from response"); ee.addSuppressed(e); @@ -196,18 +215,18 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { } } // EMPTY is safe here because we don't call namedObject - try (XContentParser xContentParser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, content)) { + try ( + XContentParser xContentParser = xContentType.xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, content) + ) { parsedResponse = parser.apply(xContentParser, xContentType); } catch (XContentParseException e) { /* Because we're streaming the response we can't get a copy of it here. The best we can do is hint that it * is totally wrong and we're probably not talking to Elasticsearch. 
*/ - throw new OpenSearchException( - "Error parsing the response, remote is likely not an OpenSearch instance", e); + throw new OpenSearchException("Error parsing the response, remote is likely not an OpenSearch instance", e); } } catch (IOException e) { - throw new OpenSearchException( - "Error deserializing response, remote is likely not an OpenSearch instance", e); + throw new OpenSearchException("Error deserializing response, remote is likely not an OpenSearch instance", e); } listener.onResponse(parsedResponse); } @@ -220,15 +239,16 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { if (e instanceof ResponseException) { ResponseException re = (ResponseException) e; int statusCode = re.getResponse().getStatusLine().getStatusCode(); - e = wrapExceptionToPreserveStatus(statusCode, - re.getResponse().getEntity(), re); + e = wrapExceptionToPreserveStatus(statusCode, re.getResponse().getEntity(), re); if (RestStatus.TOO_MANY_REQUESTS.getStatus() == statusCode) { listener.onRejection(e); return; } } else if (e instanceof ContentTooLongException) { e = new IllegalArgumentException( - "Remote responded with a chunk that was too large. Use a smaller batch size.", e); + "Remote responded with a chunk that was too large. Use a smaller batch size.", + e + ); } listener.onFailure(e); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/RemoteReindexExtension.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/RemoteReindexExtension.java index 81543b04ea0..ce252de292d 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/RemoteReindexExtension.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/RemoteReindexExtension.java @@ -28,7 +28,8 @@ public interface RemoteReindexExtension { * the success/failure of the remote reindex call. * @return ActionListener wrapper implementation. 
*/ - ActionListener getRemoteReindexActionListener(ActionListener listener, - ReindexRequest reindexRequest); + ActionListener getRemoteReindexActionListener( + ActionListener listener, + ReindexRequest reindexRequest + ); } - diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java index e4d30bfd033..a71381d968c 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java @@ -33,9 +33,8 @@ package org.opensearch.index.reindex; public abstract class AbstractAsyncBulkByScrollActionMetadataTestCase< - Request extends AbstractBulkByScrollRequest, - Response extends BulkByScrollResponse> - extends AbstractAsyncBulkByScrollActionTestCase { + Request extends AbstractBulkByScrollRequest, + Response extends BulkByScrollResponse> extends AbstractAsyncBulkByScrollActionTestCase { protected ScrollableHitSource.BasicHit doc() { return new ScrollableHitSource.BasicHit("index", "type", "id", 0); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index b5bb0c5891b..6b691a76ffe 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -53,9 +53,8 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public abstract class AbstractAsyncBulkByScrollActionScriptTestCase< - Request extends AbstractBulkIndexByScrollRequest, - Response extends BulkByScrollResponse> - extends AbstractAsyncBulkByScrollActionTestCase { + Request extends AbstractBulkIndexByScrollRequest, + Response extends BulkByScrollResponse> extends AbstractAsyncBulkByScrollActionTestCase { protected ScriptService scriptService; @@ -117,8 +116,10 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase< } public void testSetOpTypeUnknown() throws Exception { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> applyScript((Map ctx) -> ctx.put("op", "unknown"))); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> applyScript((Map ctx) -> ctx.put("op", "unknown")) + ); assertThat(e.getMessage(), equalTo("Operation type [unknown] not allowed, only [noop, index, delete] are allowed")); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java index be17fb877c0..1dd758150c3 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -225,10 +225,16 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { // this test primarily tests ClientScrollableHitSource but left it to test integration to status client.scrollsToReject = randomIntBetween(0, testRequest.getMaxRetries() - 1); // use fail() onResponse handler because 
mocked search never fires on listener. - ClientScrollableHitSource hitSource = new ClientScrollableHitSource(logger, buildTestBackoffPolicy(), + ClientScrollableHitSource hitSource = new ClientScrollableHitSource( + logger, + buildTestBackoffPolicy(), threadPool, - testTask.getWorkerState()::countSearchRetry, r -> fail(), ExceptionsHelper::reThrowIfNotNull, - new ParentTaskAssigningClient(client, localNode, testTask), testRequest.getSearchRequest()); + testTask.getWorkerState()::countSearchRetry, + r -> fail(), + ExceptionsHelper::reThrowIfNotNull, + new ParentTaskAssigningClient(client, localNode, testTask), + testRequest.getSearchRequest() + ); hitSource.setScroll(scrollId()); hitSource.startNextScroll(TimeValue.timeValueSeconds(0)); assertBusy(() -> assertEquals(client.scrollsToReject + 1, client.scrollAttempts.get())); @@ -243,21 +249,25 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { public void testStartNextScrollRetriesOnRejectionButFailsOnTooManyRejections() throws Exception { // this test primarily tests ClientScrollableHitSource but left it to test integration to status client.scrollsToReject = testRequest.getMaxRetries() + randomIntBetween(1, 100); - assertExactlyOnce( - onFail -> { - Consumer validingOnFail = e -> { - assertNotNull(ExceptionsHelper.unwrap(e, OpenSearchRejectedExecutionException.class)); - onFail.run(); - }; - ClientScrollableHitSource hitSource = new ClientScrollableHitSource(logger, buildTestBackoffPolicy(), - threadPool, - testTask.getWorkerState()::countSearchRetry, r -> fail(), validingOnFail, - new ParentTaskAssigningClient(client, localNode, testTask), testRequest.getSearchRequest()); - hitSource.setScroll(scrollId()); - hitSource.startNextScroll(TimeValue.timeValueSeconds(0)); - assertBusy(() -> assertEquals(testRequest.getMaxRetries() + 1, client.scrollAttempts.get())); - } - ); + assertExactlyOnce(onFail -> { + Consumer validingOnFail = e -> { + assertNotNull(ExceptionsHelper.unwrap(e, OpenSearchRejectedExecutionException.class)); + onFail.run(); + }; + ClientScrollableHitSource hitSource = new ClientScrollableHitSource( + logger, + buildTestBackoffPolicy(), + threadPool, + testTask.getWorkerState()::countSearchRetry, + r -> fail(), + validingOnFail, + new ParentTaskAssigningClient(client, localNode, testTask), + testRequest.getSearchRequest() + ); + hitSource.setScroll(scrollId()); + hitSource.startNextScroll(TimeValue.timeValueSeconds(0)); + assertBusy(() -> assertEquals(testRequest.getMaxRetries() + 1, client.scrollAttempts.get())); + }); assertNull("There shouldn't be a scroll attempt pending that we didn't reject", client.lastScroll.get()); assertEquals(testRequest.getMaxRetries(), testTask.getStatus().getSearchRetries()); } @@ -302,42 +312,48 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { ShardId shardId = new ShardId(new Index("name", "uid"), 0); if (rarely()) { versionConflicts++; - responses[i] = new BulkItemResponse(i, randomFrom(DocWriteRequest.OpType.values()), - new Failure(shardId.getIndexName(), "type", "id" + i, - new VersionConflictEngineException(shardId, "id", "test"))); + responses[i] = new BulkItemResponse( + i, + randomFrom(DocWriteRequest.OpType.values()), + new Failure(shardId.getIndexName(), "type", "id" + i, new VersionConflictEngineException(shardId, "id", "test")) + ); continue; } boolean createdResponse; DocWriteRequest.OpType opType; switch (randomIntBetween(0, 2)) { - case 0: - createdResponse = true; - opType = DocWriteRequest.OpType.CREATE; - created++; - break; - 
case 1: - createdResponse = false; - opType = randomFrom(DocWriteRequest.OpType.INDEX, DocWriteRequest.OpType.UPDATE); - updated++; - break; - case 2: - createdResponse = false; - opType = DocWriteRequest.OpType.DELETE; - deleted++; - break; - default: - throw new RuntimeException("Bad scenario"); + case 0: + createdResponse = true; + opType = DocWriteRequest.OpType.CREATE; + created++; + break; + case 1: + createdResponse = false; + opType = randomFrom(DocWriteRequest.OpType.INDEX, DocWriteRequest.OpType.UPDATE); + updated++; + break; + case 2: + createdResponse = false; + opType = DocWriteRequest.OpType.DELETE; + deleted++; + break; + default: + throw new RuntimeException("Bad scenario"); } final int seqNo = randomInt(20); final int primaryTerm = randomIntBetween(1, 16); - final IndexResponse response = - new IndexResponse(shardId, "type", "id" + i, seqNo, primaryTerm, randomInt(), createdResponse); + final IndexResponse response = new IndexResponse( + shardId, + "type", + "id" + i, + seqNo, + primaryTerm, + randomInt(), + createdResponse + ); responses[i] = new BulkItemResponse(i, opType, response); } - assertExactlyOnce(onSuccess -> - new DummyAsyncBulkByScrollAction().onBulkResponse(new BulkResponse(responses, 0), - onSuccess) - ); + assertExactlyOnce(onSuccess -> new DummyAsyncBulkByScrollAction().onBulkResponse(new BulkResponse(responses, 0), onSuccess)); assertEquals(versionConflicts, testTask.getStatus().getVersionConflicts()); assertEquals(updated, testTask.getStatus().getUpdated()); assertEquals(created, testTask.getStatus().getCreated()); @@ -383,8 +399,13 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { */ public void testShardFailuresAbortRequest() throws Exception { SearchFailure shardFailure = new SearchFailure(new RuntimeException("test")); - ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(false, singletonList(shardFailure), 0, - emptyList(), null); + ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response( + false, + singletonList(shardFailure), + 0, + emptyList(), + null + ); simulateScrollResponse(new DummyAsyncBulkByScrollAction(), System.nanoTime(), 0, scrollResponse); BulkByScrollResponse response = listener.get(); assertThat(response.getBulkFailures(), empty()); @@ -414,8 +435,10 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { public void testBulkFailuresAbortRequest() throws Exception { Failure failure = new Failure("index", "type", "id", new RuntimeException("test")); DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction(); - BulkResponse bulkResponse = new BulkResponse(new BulkItemResponse[] - {new BulkItemResponse(0, DocWriteRequest.OpType.CREATE, failure)}, randomLong()); + BulkResponse bulkResponse = new BulkResponse( + new BulkItemResponse[] { new BulkItemResponse(0, DocWriteRequest.OpType.CREATE, failure) }, + randomLong() + ); action.onBulkResponse(bulkResponse, Assert::fail); BulkByScrollResponse response = listener.get(); assertThat(response.getBulkFailures(), contains(failure)); @@ -477,6 +500,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { capturedCommand.set(command); return new ScheduledCancellable() { private boolean cancelled = false; + @Override public long getDelay(TimeUnit unit) { return unit.convert(delay.millis(), TimeUnit.MILLISECONDS); @@ -518,11 +542,22 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { // create a simulated response. 
SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), - new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); + SearchHits hits = new SearchHits( + IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), + new TotalHits(0, TotalHits.Relation.EQUAL_TO), + 0 + ); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); - SearchResponse searchResponse = new SearchResponse(internalResponse, scrollId(), 5, 4, 0, randomLong(), null, - SearchResponse.Clusters.EMPTY); + SearchResponse searchResponse = new SearchResponse( + internalResponse, + scrollId(), + 5, + 4, + 0, + randomLong(), + null, + SearchResponse.Clusters.EMPTY + ); client.lastSearch.get().listener.onResponse(searchResponse); @@ -572,9 +607,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { assertThat(response.getSearchFailures(), empty()); assertNull(response.getReasonCancelled()); } else { - assertExactlyOnce(onSuccess -> - action.sendBulkRequest(request, onSuccess) - ); + assertExactlyOnce(onSuccess -> action.sendBulkRequest(request, onSuccess)); } } @@ -621,7 +654,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { } action.refreshAndFinish(emptyList(), emptyList(), false); if (shouldRefresh) { - assertArrayEquals(new String[] {"foo"}, client.lastRefreshRequest.get().indices()); + assertArrayEquals(new String[] { "foo" }, client.lastRefreshRequest.get().indices()); } else { assertNull("No refresh was attempted", client.lastRefreshRequest.get()); } @@ -632,18 +665,24 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { } public void testCancelBeforeScrollResponse() throws Exception { - cancelTaskCase((DummyAsyncBulkByScrollAction action) -> simulateScrollResponse(action, System.nanoTime(), 1, - new ScrollableHitSource.Response(false, emptyList(), between(1, 100000), emptyList(), null))); + cancelTaskCase( + (DummyAsyncBulkByScrollAction action) -> simulateScrollResponse( + action, + System.nanoTime(), + 1, + new ScrollableHitSource.Response(false, emptyList(), between(1, 100000), emptyList(), null) + ) + ); } public void testCancelBeforeSendBulkRequest() throws Exception { - cancelTaskCase((DummyAsyncBulkByScrollAction action) -> - action.sendBulkRequest(new BulkRequest(), Assert::fail)); + cancelTaskCase((DummyAsyncBulkByScrollAction action) -> action.sendBulkRequest(new BulkRequest(), Assert::fail)); } public void testCancelBeforeOnBulkResponse() throws Exception { - cancelTaskCase((DummyAsyncBulkByScrollAction action) -> - action.onBulkResponse(new BulkResponse(new BulkItemResponse[0], 0), Assert::fail)); + cancelTaskCase( + (DummyAsyncBulkByScrollAction action) -> action.onBulkResponse(new BulkResponse(new BulkItemResponse[0], 0), Assert::fail) + ); } public void testCancelBeforeStartNextScroll() throws Exception { @@ -725,8 +764,12 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { /** * Simulate a scroll response by setting the scroll id and firing the onScrollResponse method. 
*/ - private void simulateScrollResponse(DummyAsyncBulkByScrollAction action, long lastBatchTime, int lastBatchSize, - ScrollableHitSource.Response response) { + private void simulateScrollResponse( + DummyAsyncBulkByScrollAction action, + long lastBatchTime, + int lastBatchSize, + ScrollableHitSource.Response response + ) { action.setScroll(scrollId()); action.onScrollResponse(lastBatchTime, lastBatchSize, new ScrollableHitSource.AsyncResponse() { @Override @@ -741,11 +784,22 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { }); } - private class DummyAsyncBulkByScrollAction - extends AbstractAsyncBulkByScrollAction { + private class DummyAsyncBulkByScrollAction extends AbstractAsyncBulkByScrollAction< + DummyAbstractBulkByScrollRequest, + DummyTransportAsyncBulkByScrollAction> { DummyAsyncBulkByScrollAction() { - super(testTask, randomBoolean(), randomBoolean(), AsyncBulkByScrollActionTests.this.logger, - new ParentTaskAssigningClient(client, localNode, testTask), client.threadPool(), testRequest, listener, null, null); + super( + testTask, + randomBoolean(), + randomBoolean(), + AsyncBulkByScrollActionTests.this.logger, + new ParentTaskAssigningClient(client, localNode, testTask), + client.threadPool(), + testRequest, + listener, + null, + null + ); } @Override @@ -769,9 +823,9 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { return constantBackoff(timeValueMillis(0), testRequest.getMaxRetries()); } - private static class DummyTransportAsyncBulkByScrollAction - extends TransportAction { - + private static class DummyTransportAsyncBulkByScrollAction extends TransportAction< + DummyAbstractBulkByScrollRequest, + BulkByScrollResponse> { protected DummyTransportAsyncBulkByScrollAction(String actionName, ActionFilters actionFilters, TaskManager taskManager) { super(actionName, actionFilters, taskManager); @@ -828,11 +882,15 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { @Override @SuppressWarnings("unchecked") - protected - void doExecute(ActionType action, Request request, ActionListener listener) { + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) { listener.onFailure( - new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())); + new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders()) + ); return; } @@ -886,37 +944,51 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { ShardId shardId = new ShardId(new Index(item.index(), "uuid"), 0); if (item instanceof IndexRequest) { IndexRequest index = (IndexRequest) item; - response = - new IndexResponse( - shardId, - index.type(), - index.id() == null ? "dummy_id" : index.id(), - randomInt(20), - randomIntBetween(1, 16), - randomIntBetween(0, Integer.MAX_VALUE), - true); + response = new IndexResponse( + shardId, + index.type(), + index.id() == null ? 
"dummy_id" : index.id(), + randomInt(20), + randomIntBetween(1, 16), + randomIntBetween(0, Integer.MAX_VALUE), + true + ); } else if (item instanceof UpdateRequest) { UpdateRequest update = (UpdateRequest) item; - response = new UpdateResponse(shardId, update.type(), update.id(), randomNonNegativeLong(), - randomIntBetween(1, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), Result.CREATED); + response = new UpdateResponse( + shardId, + update.type(), + update.id(), + randomNonNegativeLong(), + randomIntBetween(1, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + Result.CREATED + ); } else if (item instanceof DeleteRequest) { DeleteRequest delete = (DeleteRequest) item; - response = - new DeleteResponse( - shardId, - delete.type(), - delete.id(), - randomInt(20), - randomIntBetween(1, 16), - randomIntBetween(0, Integer.MAX_VALUE), - true); + response = new DeleteResponse( + shardId, + delete.type(), + delete.id(), + randomInt(20), + randomIntBetween(1, 16), + randomIntBetween(0, Integer.MAX_VALUE), + true + ); } else { throw new RuntimeException("Unknown request: " + item); } if (i == toReject) { - responses[i] = new BulkItemResponse(i, item.opType(), - new Failure(response.getIndex(), response.getType(), response.getId(), - new OpenSearchRejectedExecutionException())); + responses[i] = new BulkItemResponse( + i, + item.opType(), + new Failure( + response.getIndex(), + response.getType(), + response.getId(), + new OpenSearchRejectedExecutionException() + ) + ); } else { responses[i] = new BulkItemResponse(i, item.opType(), response); } @@ -932,15 +1004,15 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase { int wraps = randomIntBetween(0, 4); for (int i = 0; i < wraps; i++) { switch (randomIntBetween(0, 2)) { - case 0: - e = new SearchPhaseExecutionException("test", "test failure", e, new ShardSearchFailure[0]); - continue; - case 1: - e = new ReduceSearchPhaseException("test", "test failure", e, new ShardSearchFailure[0]); - continue; - case 2: - e = new OpenSearchException(e); - continue; + case 0: + e = new SearchPhaseExecutionException("test", "test failure", e, new ShardSearchFailure[0]); + continue; + case 1: + e = new ReduceSearchPhaseException("test", "test failure", e, new ShardSearchFailure[0]); + continue; + case 2: + e = new OpenSearchException(e); + continue; } } return e; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelperTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelperTests.java index bae1c2040b0..073eb3550d2 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelperTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkByScrollParallelizationHelperTests.java @@ -46,12 +46,9 @@ import static org.opensearch.search.RandomSearchRequestGenerator.randomSearchSou public class BulkByScrollParallelizationHelperTests extends OpenSearchTestCase { public void testSliceIntoSubRequests() throws IOException { - SearchRequest searchRequest = randomSearchRequest(() -> randomSearchSourceBuilder( - () -> null, - () -> null, - () -> null, - () -> emptyList(), - () -> null)); + SearchRequest searchRequest = randomSearchRequest( + () -> randomSearchSourceBuilder(() -> null, () -> null, () -> null, () -> emptyList(), () -> null) + ); if (searchRequest.source() != null) { // Clear the slice builder if there is one set. We can't call sliceIntoSubRequests if it is. 
searchRequest.source().slice(null); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseMatcher.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseMatcher.java index 1e7bb467d6a..2cf583ea066 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseMatcher.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/BulkIndexByScrollResponseMatcher.java @@ -144,14 +144,14 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher bulkFailures = frequently() ? emptyList() - : IntStream.range(0, between(1, 3)).mapToObj(j -> new BulkItemResponse.Failure("idx", "type", "id", new Exception())) - .collect(Collectors.toList()); + BulkByScrollTask.Status status = new BulkByScrollTask.Status( + i, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + timeValueMillis(0), + 0f, + thisReasonCancelled, + timeValueMillis(0) + ); + List bulkFailures = frequently() + ? emptyList() + : IntStream.range(0, between(1, 3)) + .mapToObj(j -> new BulkItemResponse.Failure("idx", "type", "id", new Exception())) + .collect(Collectors.toList()); allBulkFailures.addAll(bulkFailures); - List searchFailures = frequently() ? emptyList() - : IntStream.range(0, between(1, 3)).mapToObj(j -> new SearchFailure(new Exception())).collect(Collectors.toList()); + List searchFailures = frequently() + ? emptyList() + : IntStream.range(0, between(1, 3)).mapToObj(j -> new SearchFailure(new Exception())).collect(Collectors.toList()); allSearchFailures.addAll(searchFailures); boolean thisTimedOut = rarely(); timedOut |= thisTimedOut; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java index d9178986f98..1bab1db908c 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/CancelTests.java @@ -98,8 +98,12 @@ public class CancelTests extends ReindexTestCase { /** * Executes the cancellation test */ - private void testCancel(String action, AbstractBulkByScrollRequestBuilder builder, CancelAssertion assertion, - Matcher taskDescriptionMatcher) throws Exception { + private void testCancel( + String action, + AbstractBulkByScrollRequestBuilder builder, + CancelAssertion assertion, + Matcher taskDescriptionMatcher + ) throws Exception { createIndex(INDEX); // Total number of documents created for this test (~10 per primary shard per slice) @@ -107,9 +111,14 @@ public class CancelTests extends ReindexTestCase { ALLOWED_OPERATIONS.release(numDocs); logger.debug("setting up [{}] docs", numDocs); - indexRandom(true, false, true, IntStream.range(0, numDocs) + indexRandom( + true, + false, + true, + IntStream.range(0, numDocs) .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)) - .collect(Collectors.toList())); + .collect(Collectors.toList()) + ); // Checks that the all documents have been indexed and correctly counted assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), numDocs); @@ -135,7 +144,9 @@ public class CancelTests extends ReindexTestCase { logger.debug("waiting for updates to be blocked"); assertBusy( () -> assertTrue("updates blocked", ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0), - 1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can take a while + 1, + 
TimeUnit.MINUTES + ); // 10 seconds is usually fine but on heavily loaded machines this can take a while // Status should show the task running TaskInfo mainTask = findTaskToCancel(action, builder.request().getSlices()); @@ -160,11 +171,15 @@ public class CancelTests extends ReindexTestCase { if (builder.request().getSlices() > 1) { boolean foundCancelled = false; - ListTasksResponse sliceList = client().admin().cluster().prepareListTasks().setParentTaskId(mainTask.getTaskId()) - .setDetailed(true).get(); + ListTasksResponse sliceList = client().admin() + .cluster() + .prepareListTasks() + .setParentTaskId(mainTask.getTaskId()) + .setDetailed(true) + .get(); sliceList.rethrowFailures("Fetch slice tasks"); logger.debug("finding at least one canceled child among {}", sliceList.getTasks()); - for (TaskInfo slice: sliceList.getTasks()) { + for (TaskInfo slice : sliceList.getTasks()) { BulkByScrollTask.Status sliceStatus = (BulkByScrollTask.Status) slice.getStatus(); if (sliceStatus.getReasonCancelled() == null) continue; assertEquals(CancelTasksRequest.DEFAULT_REASON, sliceStatus.getReasonCancelled()); @@ -194,8 +209,13 @@ public class CancelTests extends ReindexTestCase { if (ExceptionsHelper.unwrapCausesAndSuppressed(e, t -> t instanceof TaskCancelledException).isPresent()) { return; // the scroll request was cancelled } - String tasks = client().admin().cluster().prepareListTasks().setParentTaskId(mainTask.getTaskId()) - .setDetailed(true).get().toString(); + String tasks = client().admin() + .cluster() + .prepareListTasks() + .setParentTaskId(mainTask.getTaskId()) + .setDetailed(true) + .get() + .toString(); throw new RuntimeException("Exception while waiting for the response. Running tasks: " + tasks, e); } assertThat(response.getReasonCancelled(), equalTo("by user request")); @@ -236,12 +256,14 @@ public class CancelTests extends ReindexTestCase { } public void testUpdateByQueryCancel() throws Exception { - BytesReference pipeline = new BytesArray("{\n" + - " \"description\" : \"sets processed to true\",\n" + - " \"processors\" : [ {\n" + - " \"test\" : {}\n" + - " } ]\n" + - "}"); + BytesReference pipeline = new BytesArray( + "{\n" + + " \"description\" : \"sets processed to true\",\n" + + " \"processors\" : [ {\n" + + " \"test\" : {}\n" + + " } ]\n" + + "}" + ); assertAcked(client().admin().cluster().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX), (response, total, modified) -> { @@ -253,48 +275,64 @@ public class CancelTests extends ReindexTestCase { } public void testDeleteByQueryCancel() throws Exception { - testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), + testCancel( + DeleteByQueryAction.NAME, + deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); - }, equalTo("delete-by-query [" + INDEX + "]")); + }, + equalTo("delete-by-query [" + INDEX + "]") + ); } public void testReindexCancelWithWorkers() throws Exception { - testCancel(ReindexAction.NAME, - reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5), - (response, total, modified) -> { - assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user 
request")).slices(hasSize(5))); - refresh("dest"); - assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified); - }, - equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")); + testCancel( + ReindexAction.NAME, + reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5), + (response, total, modified) -> { + assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); + refresh("dest"); + assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified); + }, + equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]") + ); } public void testUpdateByQueryCancelWithWorkers() throws Exception { - BytesReference pipeline = new BytesArray("{\n" + - " \"description\" : \"sets processed to true\",\n" + - " \"processors\" : [ {\n" + - " \"test\" : {}\n" + - " } ]\n" + - "}"); + BytesReference pipeline = new BytesArray( + "{\n" + + " \"description\" : \"sets processed to true\",\n" + + " \"processors\" : [ {\n" + + " \"test\" : {}\n" + + " } ]\n" + + "}" + ); assertAcked(client().admin().cluster().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); - testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5), - (response, total, modified) -> { - assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); - assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)).get(), modified); - }, equalTo("update-by-query [" + INDEX + "]")); + testCancel( + UpdateByQueryAction.NAME, + updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5), + (response, total, modified) -> { + assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); + assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)).get(), modified); + }, + equalTo("update-by-query [" + INDEX + "]") + ); assertAcked(client().admin().cluster().deletePipeline(new DeletePipelineRequest("set-processed")).get()); } public void testDeleteByQueryCancelWithWorkers() throws Exception { - testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), + testCancel( + DeleteByQueryAction.NAME, + deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); - }, equalTo("delete-by-query [" + INDEX + "]")); + }, + equalTo("delete-by-query [" + INDEX + "]") + ); } /** diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java index 68d0b258d3c..e0c8bf604ed 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java @@ -106,23 +106,30 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase { public void testRetryFail() { int retries = randomInt(10); - ExpectedException ex = expectThrows(ExpectedException.class, () -> { - 
dotestBasicsWithRetry(retries, retries+1, retries+1, e -> { throw new ExpectedException(e); }); - }); + ExpectedException ex = expectThrows( + ExpectedException.class, + () -> { dotestBasicsWithRetry(retries, retries + 1, retries + 1, e -> { throw new ExpectedException(e); }); } + ); assertThat(ex.getCause(), instanceOf(OpenSearchRejectedExecutionException.class)); } - private void dotestBasicsWithRetry(int retries, int minFailures, int maxFailures, - Consumer failureHandler) throws InterruptedException { + private void dotestBasicsWithRetry(int retries, int minFailures, int maxFailures, Consumer failureHandler) + throws InterruptedException { BlockingQueue responses = new ArrayBlockingQueue<>(100); MockClient client = new MockClient(threadPool); TaskId parentTask = new TaskId("thenode", randomInt()); AtomicInteger actualSearchRetries = new AtomicInteger(); int expectedSearchRetries = 0; - ClientScrollableHitSource hitSource = new ClientScrollableHitSource(logger, BackoffPolicy.constantBackoff(TimeValue.ZERO, retries), - threadPool, actualSearchRetries::incrementAndGet, responses::add, failureHandler, + ClientScrollableHitSource hitSource = new ClientScrollableHitSource( + logger, + BackoffPolicy.constantBackoff(TimeValue.ZERO, retries), + threadPool, + actualSearchRetries::incrementAndGet, + responses::add, + failureHandler, new ParentTaskAssigningClient(client, parentTask), - new SearchRequest().scroll("1m")); + new SearchRequest().scroll("1m") + ); hitSource.start(); for (int retry = 0; retry < randomIntBetween(minFailures, maxFailures); ++retry) { @@ -158,27 +165,41 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase { MockClient client = new MockClient(threadPool); TaskId parentTask = new TaskId("thenode", randomInt()); - ClientScrollableHitSource hitSource = new ClientScrollableHitSource(logger, BackoffPolicy.constantBackoff(TimeValue.ZERO, 0), - threadPool, () -> fail(), r -> fail(), e -> fail(), new ParentTaskAssigningClient(client, - parentTask), + ClientScrollableHitSource hitSource = new ClientScrollableHitSource( + logger, + BackoffPolicy.constantBackoff(TimeValue.ZERO, 0), + threadPool, + () -> fail(), + r -> fail(), + e -> fail(), + new ParentTaskAssigningClient(client, parentTask), // Set the base for the scroll to wait - this is added to the figure we calculate below - new SearchRequest().scroll(timeValueSeconds(10))); + new SearchRequest().scroll(timeValueSeconds(10)) + ); hitSource.startNextScroll(timeValueSeconds(100)); - client.validateRequest(SearchScrollAction.INSTANCE, - (SearchScrollRequest r) -> assertEquals(r.scroll().keepAlive().seconds(), 110)); + client.validateRequest(SearchScrollAction.INSTANCE, (SearchScrollRequest r) -> assertEquals(r.scroll().keepAlive().seconds(), 110)); } - - private SearchResponse createSearchResponse() { // create a simulated response. 
SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), - new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); + SearchHits hits = new SearchHits( + IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), + new TotalHits(0, TotalHits.Relation.EQUAL_TO), + 0 + ); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); - return new SearchResponse(internalResponse, randomSimpleString(random(), 1, 10), 5, 4, 0, randomLong(), null, - SearchResponse.Clusters.EMPTY); + return new SearchResponse( + internalResponse, + randomSimpleString(random(), 1, 10), + 5, + 4, + 0, + randomLong(), + null, + SearchResponse.Clusters.EMPTY + ); } private void assertSameHits(List actual, SearchHit[] expected) { @@ -222,24 +243,28 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase { } private static class MockClient extends AbstractClient { - private ExecuteRequest executeRequest; + private ExecuteRequest executeRequest; MockClient(ThreadPool threadPool) { super(Settings.EMPTY, threadPool); } @Override - protected synchronized - void doExecute(ActionType action, - Request request, ActionListener listener) { + protected synchronized void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { this.executeRequest = new ExecuteRequest<>(action, request, listener); this.notifyAll(); } @SuppressWarnings("unchecked") - public void respondx(ActionType action, - Function response) { + public void respondx( + ActionType action, + Function response + ) { ExecuteRequest executeRequest; synchronized (this) { executeRequest = this.executeRequest; @@ -248,8 +273,7 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase { ((ExecuteRequest) executeRequest).respond(action, response); } - public void respond(ActionType action, - Response response) { + public void respond(ActionType action, Response response) { respondx(action, req -> response); } @@ -264,14 +288,15 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase { } @SuppressWarnings("unchecked") - public void validateRequest(ActionType action, - Consumer validator) { + public void validateRequest( + ActionType action, + Consumer validator + ) { ((ExecuteRequest) executeRequest).validateRequest(action, validator); } @Override - public void close() { - } + public void close() {} public synchronized void awaitOperation() throws InterruptedException { if (executeRequest == null) { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java index f5156bb4fde..13ca95c01b7 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java @@ -72,14 +72,15 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { } public void testBasics() throws Exception { - indexRandom(true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - 
client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + indexRandom( + true, + client().prepareIndex("test", "test", "1").setSource("foo", "a"), + client().prepareIndex("test", "test", "2").setSource("foo", "a"), + client().prepareIndex("test", "test", "3").setSource("foo", "b"), + client().prepareIndex("test", "test", "4").setSource("foo", "c"), + client().prepareIndex("test", "test", "5").setSource("foo", "d"), + client().prepareIndex("test", "test", "6").setSource("foo", "e"), + client().prepareIndex("test", "test", "7").setSource("foo", "f") ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7); @@ -139,8 +140,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { indexRandom(true, true, true, builders); // Deletes all the documents with candidate=true - assertThat(deleteByQuery().source("test-*").filter(termQuery("candidate", true)).refresh(true).get(), - matcher().deleted(deletions)); + assertThat(deleteByQuery().source("test-*").filter(termQuery("candidate", true)).refresh(true).get(), matcher().deleted(deletions)); for (int i = 0; i < indices; i++) { long remaining = docs - candidates[i]; @@ -198,9 +198,11 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { - builders.add(client().prepareIndex("test", "test", Integer.toString(i)) + builders.add( + client().prepareIndex("test", "test", Integer.toString(i)) .setRouting(randomAlphaOfLengthBetween(1, 5)) - .setSource("foo", "bar")); + .setSource("foo", "bar") + ); } indexRandom(true, true, true, builders); @@ -235,8 +237,10 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { try { enableIndexBlock("test", SETTING_READ_ONLY); - assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), - matcher().deleted(0).failures(docs)); + assertThat( + deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), + matcher().deleted(0).failures(docs) + ); } finally { disableIndexBlock("test", SETTING_READ_ONLY); } @@ -267,19 +271,29 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { enableIndexBlock("test", SETTING_READ_ONLY_ALLOW_DELETE); if (diskAllocationDeciderEnabled) { InternalTestCluster internalTestCluster = internalCluster(); - InternalClusterInfoService infoService = (InternalClusterInfoService) internalTestCluster - .getInstance(ClusterInfoService.class, internalTestCluster.getMasterName()); + InternalClusterInfoService infoService = (InternalClusterInfoService) internalTestCluster.getInstance( + ClusterInfoService.class, + internalTestCluster.getMasterName() + ); ThreadPool threadPool = internalTestCluster.getInstance(ThreadPool.class, internalTestCluster.getMasterName()); // Refresh the cluster info after a random delay to check the disk threshold and release the block on the index threadPool.schedule(infoService::refresh, TimeValue.timeValueMillis(randomIntBetween(1, 100)), ThreadPool.Names.MANAGEMENT); // The delete by query request will be executed successfully because the block will be released - assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), - matcher().deleted(docs)); + assertThat( + deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), + matcher().deleted(docs) + ); } else 
{ // The delete by query request will not be executed successfully because the block cannot be released - assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true) - .setMaxRetries(2).setRetryBackoffInitialTime(TimeValue.timeValueMillis(50)).get(), - matcher().deleted(0).failures(docs)); + assertThat( + deleteByQuery().source("test") + .filter(QueryBuilders.matchAllQuery()) + .refresh(true) + .setMaxRetries(2) + .setRetryBackoffInitialTime(TimeValue.timeValueMillis(50)) + .get(), + matcher().deleted(0).failures(docs) + ); } } finally { disableIndexBlock("test", SETTING_READ_ONLY_ALLOW_DELETE); @@ -295,14 +309,15 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { } public void testSlices() throws Exception { - indexRandom(true, - client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c"), - client().prepareIndex("test", "test", "5").setSource("foo", "d"), - client().prepareIndex("test", "test", "6").setSource("foo", "e"), - client().prepareIndex("test", "test", "7").setSource("foo", "f") + indexRandom( + true, + client().prepareIndex("test", "test", "1").setSource("foo", "a"), + client().prepareIndex("test", "test", "2").setSource("foo", "a"), + client().prepareIndex("test", "test", "3").setSource("foo", "b"), + client().prepareIndex("test", "test", "4").setSource("foo", "c"), + client().prepareIndex("test", "test", "5").setSource("foo", "d"), + client().prepareIndex("test", "test", "6").setSource("foo", "e"), + client().prepareIndex("test", "test", "7").setSource("foo", "f") ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7); @@ -311,26 +326,16 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { // Deletes the two docs that matches "foo:a" assertThat( - deleteByQuery() - .source("test") - .filter(termQuery("foo", "a")) - .refresh(true) - .setSlices(slices).get(), - matcher() - .deleted(2) - .slices(hasSize(expectedSlices))); + deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(), + matcher().deleted(2).slices(hasSize(expectedSlices)) + ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5); // Delete remaining docs assertThat( - deleteByQuery() - .source("test") - .filter(QueryBuilders.matchAllQuery()) - .refresh(true) - .setSlices(slices).get(), - matcher() - .deleted(5) - .slices(hasSize(expectedSlices))); + deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(), + matcher().deleted(5).slices(hasSize(expectedSlices)) + ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0); } @@ -359,14 +364,9 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); assertThat( - deleteByQuery() - .source(sourceIndexNames) - .filter(QueryBuilders.matchAllQuery()) - .refresh(true) - .setSlices(slices).get(), - matcher() - .deleted(allDocs.size()) - .slices(hasSize(expectedSlices))); + deleteByQuery().source(sourceIndexNames).filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(), + matcher().deleted(allDocs.size()).slices(hasSize(expectedSlices)) + ); for (String index : docs.keySet()) { 
assertHitCount(client().prepareSearch(index).setTypes("test").setSize(0).get(), 0); @@ -375,8 +375,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { } public void testMissingSources() { - BulkByScrollResponse response = updateByQuery() - .source("missing-index-*") + BulkByScrollResponse response = updateByQuery().source("missing-index-*") .refresh(true) .setSlices(AbstractBulkByScrollRequest.AUTO_SLICES) .get(); @@ -385,10 +384,11 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { /** Enables or disables the cluster disk allocation decider **/ private void setDiskAllocationDeciderEnabled(boolean value) { - Settings settings = value ? Settings.builder().putNull( - DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey()).build() : - Settings.builder().put( - DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), value).build(); + Settings settings = value + ? Settings.builder().putNull(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey()).build() + : Settings.builder() + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), value) + .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get()); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java index dabd4ba93ed..13101cdf594 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryConcurrentTests.java @@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.equalTo; public class DeleteByQueryConcurrentTests extends ReindexTestCase { public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { - final Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; final long docs = randomIntBetween(1, 50); List builders = new ArrayList<>(); @@ -69,8 +69,10 @@ public class DeleteByQueryConcurrentTests extends ReindexTestCase { try { start.await(); - assertThat(deleteByQuery().source("_all").filter(termQuery("field", threadNum)).refresh(true).get(), - matcher().deleted(docs)); + assertThat( + deleteByQuery().source("_all").filter(termQuery("field", threadNum)).refresh(true).get(), + matcher().deleted(docs) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } @@ -98,7 +100,7 @@ public class DeleteByQueryConcurrentTests extends ReindexTestCase { } indexRandom(true, true, true, builders); - final Thread[] threads = new Thread[scaledRandomIntBetween(2, 9)]; + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 9)]; final CountDownLatch start = new CountDownLatch(1); final MatchQueryBuilder query = matchQuery("foo", "bar"); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java index 3605a70a58f..581cb19b0dd 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java @@ -49,10 +49,13 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ReindexBasicTests extends ReindexTestCase { 
public void testFiltering() throws Exception { - indexRandom(true, client().prepareIndex("source", "test", "1").setSource("foo", "a"), - client().prepareIndex("source", "test", "2").setSource("foo", "a"), - client().prepareIndex("source", "test", "3").setSource("foo", "b"), - client().prepareIndex("source", "test", "4").setSource("foo", "c")); + indexRandom( + true, + client().prepareIndex("source", "test", "1").setSource("foo", "a"), + client().prepareIndex("source", "test", "2").setSource("foo", "a"), + client().prepareIndex("source", "test", "3").setSource("foo", "b"), + client().prepareIndex("source", "test", "4").setSource("foo", "c") + ); assertHitCount(client().prepareSearch("source").setSize(0).get(), 4); // Copy all the docs @@ -159,11 +162,7 @@ public class ReindexBasicTests extends ReindexTestCase { int expectedSlices = expectedSliceStatuses(slices, docs.keySet()); String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); - ReindexRequestBuilder request = reindex() - .source(sourceIndexNames) - .destination("dest", "type") - .refresh(true) - .setSlices(slices); + ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest", "type").refresh(true).setSlices(slices); BulkByScrollResponse response = request.get(); assertThat(response, matcher().created(allDocs.size()).slices(hasSize(expectedSlices))); @@ -171,8 +170,7 @@ public class ReindexBasicTests extends ReindexTestCase { } public void testMissingSources() { - BulkByScrollResponse response = updateByQuery() - .source("missing-index-*") + BulkByScrollResponse response = updateByQuery().source("missing-index-*") .refresh(true) .setSlices(AbstractBulkByScrollRequest.AUTO_SLICES) .get(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java index 843b71c99ab..db9c2779928 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFailureTests.java @@ -56,8 +56,7 @@ public class ReindexFailureTests extends ReindexTestCase { * Create the destination index such that the copy will cause a mapping * conflict on every request. */ - indexRandom(true, - client().prepareIndex("dest", "_doc", "test").setSource("test", 10) /* Its a string in the source! */); + indexRandom(true, client().prepareIndex("dest", "_doc", "test").setSource("test", 10) /* Its a string in the source! */); indexDocs(100); @@ -70,18 +69,15 @@ public class ReindexFailureTests extends ReindexTestCase { copy.source().setSize(1); BulkByScrollResponse response = copy.get(); - assertThat(response, matcher() - .batches(1) - .failures(both(greaterThan(0)).and(lessThanOrEqualTo(maximumNumberOfShards())))); - for (Failure failure: response.getBulkFailures()) { + assertThat(response, matcher().batches(1).failures(both(greaterThan(0)).and(lessThanOrEqualTo(maximumNumberOfShards())))); + for (Failure failure : response.getBulkFailures()) { assertThat(failure.getMessage(), containsString("IllegalArgumentException[For input string: \"words words\"]")); } } public void testAbortOnVersionConflict() throws Exception { // Just put something in the way of the copy. 
- indexRandom(true, - client().prepareIndex("dest", "_doc", "1").setSource("test", "test")); + indexRandom(true, client().prepareIndex("dest", "_doc", "1").setSource("test", "test")); indexDocs(100); @@ -91,7 +87,7 @@ public class ReindexFailureTests extends ReindexTestCase { BulkByScrollResponse response = copy.get(); assertThat(response, matcher().batches(1).versionConflicts(1).failures(1).created(99)); - for (Failure failure: response.getBulkFailures()) { + for (Failure failure : response.getBulkFailures()) { assertThat(failure.getMessage(), containsString("VersionConflictEngineException[[")); } } @@ -128,9 +124,9 @@ public class ReindexFailureTests extends ReindexTestCase { assertBusy(() -> assertFalse(client().admin().indices().prepareExists("source").get().isExists())); } catch (ExecutionException e) { logger.info("Triggered a reindex failure on the {} attempt: {}", attempt, e.getMessage()); - assertThat(e.getMessage(), - either(containsString("all shards failed")) - .or(containsString("No search context found")) + assertThat( + e.getMessage(), + either(containsString("all shards failed")).or(containsString("No search context found")) .or(containsString("no such index [source]")) .or(containsString("Partial shards failure")) ); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java index 846166076e5..bd2aae5b7a7 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java @@ -57,9 +57,19 @@ public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTest private final BytesReference matchAll = new BytesArray(new MatchAllQueryBuilder().toString()); public void testBuildRestClient() throws Exception { - for(final String path: new String[]{"", null, "/", "path"}) { - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, path, matchAll, null, null, emptyMap(), - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + for (final String path : new String[] { "", null, "/", "path" }) { + RemoteInfo remoteInfo = new RemoteInfo( + "https", + "localhost", + 9200, + path, + matchAll, + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); long taskId = randomLong(); List threads = synchronizedList(new ArrayList<>()); RestClient client = Reindexer.buildRestClient(remoteInfo, sslConfig(), taskId, threads); @@ -82,8 +92,18 @@ public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTest for (int i = 0; i < numHeaders; ++i) { headers.put("header" + i, Integer.toString(i)); } - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, null, matchAll, null, null, - headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + RemoteInfo remoteInfo = new RemoteInfo( + "https", + "localhost", + 9200, + null, + matchAll, + null, + null, + headers, + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); long taskId = randomLong(); List threads = synchronizedList(new ArrayList<>()); RestClient client = Reindexer.buildRestClient(remoteInfo, sslConfig(), taskId, threads); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java 
b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java index 17c86b7a61d..e083b877236 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -66,8 +66,18 @@ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. */ private RemoteInfo newRemoteInfo(String host, int port) { - return new RemoteInfo(randomAlphaOfLength(5), host, port, null, query, null, null, emptyMap(), - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo( + randomAlphaOfLength(5), + host, + port, + null, + query, + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); } public void testWhitelistedRemote() { @@ -79,11 +89,25 @@ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { } public void testWhitelistedByPrefix() { - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), - new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, null, query, null, null, emptyMap(), - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT)); - checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), - newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200)); + checkRemoteWhitelist( + buildRemoteWhitelist(singletonList("*.example.com:9200")), + new RemoteInfo( + randomAlphaOfLength(5), + "es.example.com", + 9200, + null, + query, + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ) + ); + checkRemoteWhitelist( + buildRemoteWhitelist(singletonList("*.example.com:9200")), + newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200) + ); } public void testWhitelistedBySuffix() { @@ -103,8 +127,10 @@ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { public void testUnwhitelistedRemote() { int port = between(1, Integer.MAX_VALUE); List whitelist = randomBoolean() ? randomWhitelist() : emptyList(); - Exception e = expectThrows(IllegalArgumentException.class, - () -> checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("not in list", port))); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> checkRemoteWhitelist(buildRemoteWhitelist(whitelist), newRemoteInfo("not in list", port)) + ); assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); } @@ -127,9 +153,14 @@ public class ReindexFromRemoteWhitelistTests extends OpenSearchTestCase { private void assertMatchesTooMuch(List whitelist) { Exception e = expectThrows(IllegalArgumentException.class, () -> buildRemoteWhitelist(whitelist)); - assertEquals("Refusing to start because whitelist " + whitelist + " accepts all addresses. " + assertEquals( + "Refusing to start because whitelist " + + whitelist + + " accepts all addresses. 
" + "This would allow users to reindex-from-remote any URL they like effectively having OpenSearch make HTTP GETs " - + "for them.", e.getMessage()); + + "for them.", + e.getMessage() + ); } private List randomWhitelist() { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java index 2cda39f03f4..a44880c645d 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -87,10 +87,7 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase @Override protected Collection> getPlugins() { - return Arrays.asList( - Netty4Plugin.class, - ReindexFromRemoteWithAuthTests.TestPlugin.class, - ReindexPlugin.class); + return Arrays.asList(Netty4Plugin.class, ReindexFromRemoteWithAuthTests.TestPlugin.class, ReindexPlugin.class); } @Override @@ -122,28 +119,40 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. */ private RemoteInfo newRemoteInfo(String username, String password, Map headers) { - return new RemoteInfo("http", address.getAddress(), address.getPort(), null, - new BytesArray("{\"match_all\":{}}"), username, password, headers, - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo( + "http", + address.getAddress(), + address.getPort(), + null, + new BytesArray("{\"match_all\":{}}"), + username, + password, + headers, + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); } public void testReindexFromRemoteWithAuthentication() throws Exception { - ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source").destination("dest") - .setRemoteInfo(newRemoteInfo("Aladdin", "open sesame", emptyMap())); + ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + .destination("dest") + .setRemoteInfo(newRemoteInfo("Aladdin", "open sesame", emptyMap())); assertThat(request.get(), matcher().created(1)); } public void testReindexSendsHeaders() throws Exception { - ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source").destination("dest") - .setRemoteInfo(newRemoteInfo(null, null, singletonMap(TestFilter.EXAMPLE_HEADER, "doesn't matter"))); + ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + .destination("dest") + .setRemoteInfo(newRemoteInfo(null, null, singletonMap(TestFilter.EXAMPLE_HEADER, "doesn't matter"))); OpenSearchStatusException e = expectThrows(OpenSearchStatusException.class, () -> request.get()); assertEquals(RestStatus.BAD_REQUEST, e.status()); assertThat(e.getMessage(), containsString("Hurray! 
Sent the header!")); } public void testReindexWithoutAuthenticationWhenRequired() throws Exception { - ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source").destination("dest") - .setRemoteInfo(newRemoteInfo(null, null, emptyMap())); + ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + .destination("dest") + .setRemoteInfo(newRemoteInfo(null, null, emptyMap())); OpenSearchStatusException e = expectThrows(OpenSearchStatusException.class, () -> request.get()); assertEquals(RestStatus.UNAUTHORIZED, e.status()); assertThat(e.getMessage(), containsString("\"reason\":\"Authentication required\"")); @@ -151,8 +160,9 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase } public void testReindexWithBadAuthentication() throws Exception { - ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source").destination("dest") - .setRemoteInfo(newRemoteInfo("junk", "auth", emptyMap())); + ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + .destination("dest") + .setRemoteInfo(newRemoteInfo("junk", "auth", emptyMap())); OpenSearchStatusException e = expectThrows(OpenSearchStatusException.class, () -> request.get()); assertThat(e.getMessage(), containsString("\"reason\":\"Bad Authorization\"")); } @@ -165,12 +175,19 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase private final SetOnce testFilter = new SetOnce<>(); @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver expressionResolver, + Supplier repositoriesServiceSupplier + ) { testFilter.set(new ReindexFromRemoteWithAuthTests.TestFilter(threadPool)); return Collections.emptyList(); } @@ -182,8 +199,10 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase @Override public Collection getRestHeaders() { - return Arrays.asList(new RestHeaderDefinition(TestFilter.AUTHORIZATION_HEADER, false), - new RestHeaderDefinition(TestFilter.EXAMPLE_HEADER, false)); + return Arrays.asList( + new RestHeaderDefinition(TestFilter.AUTHORIZATION_HEADER, false), + new RestHeaderDefinition(TestFilter.EXAMPLE_HEADER, false) + ); } } @@ -210,8 +229,13 @@ public class ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase } @Override - public void apply(Task task, String action, - Request request, ActionListener listener, ActionFilterChain chain) { + public void apply( + Task task, + String action, + Request request, + ActionListener listener, + ActionFilterChain chain + ) { if (false == action.equals(SearchAction.NAME)) { chain.proceed(task, action, request, listener); return; @@ -221,8 +245,7 @@ public class 
ReindexFromRemoteWithAuthTests extends OpenSearchSingleNodeTestCase } String auth = context.getHeader(AUTHORIZATION_HEADER); if (auth == null) { - OpenSearchSecurityException e = new OpenSearchSecurityException("Authentication required", - RestStatus.UNAUTHORIZED); + OpenSearchSecurityException e = new OpenSearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED); e.addHeader("WWW-Authenticate", "Basic realm=auth-realm"); throw e; } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexMetadataTests.java index 13b12f77f17..291325a3d8a 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexMetadataTests.java @@ -89,8 +89,16 @@ public class ReindexMetadataTests extends AbstractAsyncBulkByScrollActionMetadat private class TestAction extends Reindexer.AsyncIndexBySearchAction { TestAction() { - super(ReindexMetadataTests.this.task, ReindexMetadataTests.this.logger, null, ReindexMetadataTests.this.threadPool, - null, null, request(), listener()); + super( + ReindexMetadataTests.this.task, + ReindexMetadataTests.this.logger, + null, + ReindexMetadataTests.this.threadPool, + null, + null, + request(), + listener() + ); } public ReindexRequest mainRequest() { @@ -98,8 +106,10 @@ public class ReindexMetadataTests extends AbstractAsyncBulkByScrollActionMetadat } @Override - public AbstractAsyncBulkByScrollAction.RequestWrapper copyMetadata(AbstractAsyncBulkByScrollAction.RequestWrapper request, - Hit doc) { + public AbstractAsyncBulkByScrollAction.RequestWrapper copyMetadata( + AbstractAsyncBulkByScrollAction.RequestWrapper request, + Hit doc + ) { return super.copyMetadata(request, doc); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java index cf36dae2cac..eb19454d8d7 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java @@ -83,8 +83,7 @@ import static org.mockito.Mockito.mock; public class ReindexRestClientSslTests extends OpenSearchTestCase { private static HttpsServer server; - private static Consumer handler = ignore -> { - }; + private static Consumer handler = ignore -> {}; @BeforeClass public static void setupHttpServer() throws Exception { @@ -98,8 +97,8 @@ public class ReindexRestClientSslTests extends OpenSearchTestCase { HttpsExchange https = (HttpsExchange) http; handler.accept(https); // Always respond with 200 - // * If the reindex sees the 200, it means the SSL connection was established correctly. - // * We can check client certs in the handler. + // * If the reindex sees the 200, it means the SSL connection was established correctly. + // * We can check client certs in the handler. 
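                // Hypothetical illustration of the "check client certs in the handler" note above: a test
                // can install a handler that inspects the negotiated SSL session before the fixed 200 reply
                // is sent. The one-certificate expectation is illustrative and not taken from this patch;
                // getSSLSession()/getPeerCertificates() are the standard JDK HttpsExchange/SSLSession APIs
                // (java.security.cert.Certificate and javax.net.ssl.SSLPeerUnverifiedException assumed imported).
                handler = https -> {
                    try {
                        Certificate[] clientChain = https.getSSLSession().getPeerCertificates();
                        assertThat(clientChain, arrayWithSize(1));
                    } catch (SSLPeerUnverifiedException e) {
                        fail("expected the client to present a certificate");
                    }
                };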
https.sendResponseHeaders(200, 0); https.close(); }); @@ -211,9 +210,18 @@ public class ReindexRestClientSslTests extends OpenSearchTestCase { } private RemoteInfo getRemoteInfo() { - return new RemoteInfo("https", "localhost", server.getAddress().getPort(), "/", - new BytesArray("{\"match_all\":{}}"), "user", "password", Collections.emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, - RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo( + "https", + "localhost", + server.getAddress().getPort(), + "/", + new BytesArray("{\"match_all\":{}}"), + "user", + "password", + Collections.emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); } @SuppressForbidden(reason = "use http server") diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java index 8c97da036b9..bd6eba132af 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexScriptTests.java @@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.containsString; public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTestCase { public void testSetIndex() throws Exception { - Object dest = randomFrom(new Object[] {234, 234L, "pancake"}); + Object dest = randomFrom(new Object[] { 234, 234L, "pancake" }); IndexRequest index = applyScript((Map ctx) -> ctx.put("_index", dest)); assertEquals(dest.toString(), index.index()); } @@ -61,7 +61,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes } public void testSetType() throws Exception { - Object type = randomFrom(new Object[] {234, 234L, "pancake"}); + Object type = randomFrom(new Object[] { 234, 234L, "pancake" }); IndexRequest index = applyScript((Map ctx) -> ctx.put("_type", type)); assertEquals(type.toString(), index.type()); } @@ -75,7 +75,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes } public void testSetId() throws Exception { - Object id = randomFrom(new Object[] {null, 234, 234L, "pancake"}); + Object id = randomFrom(new Object[] { null, 234, 234L, "pancake" }); IndexRequest index = applyScript((Map ctx) -> ctx.put("_id", id)); if (id == null) { assertNull(index.id()); @@ -85,7 +85,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes } public void testSetVersion() throws Exception { - Number version = randomFrom(new Number[] {null, 234, 234L}); + Number version = randomFrom(new Number[] { null, 234, 234L }); IndexRequest index = applyScript((Map ctx) -> ctx.put("_version", version)); if (version == null) { assertEquals(Versions.MATCH_ANY, index.version()); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java index 5ebbdb11873..8d675916437 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSingleNodeTests.java @@ -60,14 +60,15 @@ public class ReindexSingleNodeTests extends OpenSearchSingleNodeTestCase { // Copy a subset of the docs sorted int subsetSize = randomIntBetween(1, max - 1); - ReindexRequestBuilder copy = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE) - .source("source").destination("dest").refresh(true); + 
ReindexRequestBuilder copy = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + .destination("dest") + .refresh(true); copy.maxDocs(subsetSize); copy.request().addSortField("foo", SortOrder.DESC); assertThat(copy.get(), matcher().created(subsetSize)); assertHitCount(client().prepareSearch("dest").setSize(0).get(), subsetSize); - assertHitCount(client().prepareSearch("dest").setQuery(new RangeQueryBuilder("foo").gte(0).lt(max-subsetSize)).get(), 0); + assertHitCount(client().prepareSearch("dest").setQuery(new RangeQueryBuilder("foo").gte(0).lt(max - subsetSize)).get(), 0); assertWarnings(ReindexValidator.SORT_DEPRECATED_MESSAGE); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSourceTargetValidationTests.java index 991831b9057..e3bd1c1cba4 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -64,7 +64,9 @@ import static org.hamcrest.Matchers.containsString; * cluster.... */ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { - private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metadata(Metadata.builder() + private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")) + .metadata( + Metadata.builder() .put(index("target", "target_alias", "target_multi"), true) .put(index("target2", "target_multi"), true) .put(index("target_with_write_index", true, "target_multi_with_write_index"), true) @@ -74,12 +76,18 @@ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { .put(index("bar"), true) .put(index("baz"), true) .put(index("source", "source_multi"), true) - .put(index("source2", "source_multi"), true)).build(); - private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = - new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)); - private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY, - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), INDEX_NAME_EXPRESSION_RESOLVER, - new SystemIndices(new HashMap<>())); + .put(index("source2", "source_multi"), true) + ) + .build(); + private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver( + new ThreadContext(Settings.EMPTY) + ); + private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex( + Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + INDEX_NAME_EXPRESSION_RESOLVER, + new SystemIndices(new HashMap<>()) + ); private final BytesReference query = new BytesArray("{ \"foo\" : \"bar\" }"); @@ -106,16 +114,26 @@ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { public void testTargetIsAliasToMultipleIndicesWithoutWriteAlias() { Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_multi", "foo")); - assertThat(e.getMessage(), containsString("no write index is defined for alias [target_multi]. 
The write index may be explicitly " + - "disabled using is_write_index=false or the alias points to multiple indices without one being designated as a " + - "write index")); + assertThat( + e.getMessage(), + containsString( + "no write index is defined for alias [target_multi]. The write index may be explicitly " + + "disabled using is_write_index=false or the alias points to multiple indices without one being designated as a " + + "write index" + ) + ); } public void testTargetIsAliasWithWriteIndexDisabled() { Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_alias_with_write_index_disabled", "foo")); - assertThat(e.getMessage(), containsString("no write index is defined for alias [target_alias_with_write_index_disabled]. " + - "The write index may be explicitly disabled using is_write_index=false or the alias points to multiple indices without one " + - "being designated as a write index")); + assertThat( + e.getMessage(), + containsString( + "no write index is defined for alias [target_alias_with_write_index_disabled]. " + + "The write index may be explicitly disabled using is_write_index=false or the alias points to multiple indices without one " + + "being designated as a write index" + ) + ); succeeds("qux", "foo"); // writing directly into the index of which this is the alias works though } @@ -128,11 +146,39 @@ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { public void testRemoteInfoSkipsValidation() { // The index doesn't have to exist - succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, query, null, null, emptyMap(), - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "does_not_exist", "target"); + succeeds( + new RemoteInfo( + randomAlphaOfLength(5), + "test", + 9200, + null, + query, + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ), + "does_not_exist", + "target" + ); // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. - succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, query, null, null, emptyMap(), - RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "target", "target"); + succeeds( + new RemoteInfo( + randomAlphaOfLength(5), + "test", + 9200, + null, + query, + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ), + "target", + "target" + ); } private void fails(String target, String... sources) { @@ -145,8 +191,14 @@ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { } private void succeeds(RemoteInfo remoteInfo, String target, String... sources) { - ReindexValidator.validateAgainstAliases(new SearchRequest(sources), new IndexRequest(target), remoteInfo, - INDEX_NAME_EXPRESSION_RESOLVER, AUTO_CREATE_INDEX, STATE); + ReindexValidator.validateAgainstAliases( + new SearchRequest(sources), + new IndexRequest(target), + remoteInfo, + INDEX_NAME_EXPRESSION_RESOLVER, + AUTO_CREATE_INDEX, + STATE + ); } private static IndexMetadata index(String name, String... aliases) { @@ -154,11 +206,14 @@ public class ReindexSourceTargetValidationTests extends OpenSearchTestCase { } private static IndexMetadata index(String name, @Nullable Boolean writeIndex, String... 
aliases) { - IndexMetadata.Builder builder = IndexMetadata.builder(name).settings(Settings.builder() - .put("index.version.created", Version.CURRENT.id) - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 1)); - for (String alias: aliases) { + IndexMetadata.Builder builder = IndexMetadata.builder(name) + .settings( + Settings.builder() + .put("index.version.created", Version.CURRENT.id) + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 1) + ); + for (String alias : aliases) { builder.putAlias(AliasMetadata.builder(alias).writeIndex(writeIndex).build()); } return builder.build(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexTestCase.java index c81ea6eaf73..b7e218c7bdc 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexTestCase.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexTestCase.java @@ -96,9 +96,9 @@ public abstract class ReindexTestCase extends OpenSearchIntegTestCase { */ protected int expectedSlices(int requestSlices, Collection indices) { if (requestSlices == AbstractBulkByScrollRequest.AUTO_SLICES) { - int leastNumShards = Collections.min(indices.stream() - .map(sourceIndex -> getNumShards(sourceIndex).numPrimaries) - .collect(Collectors.toList())); + int leastNumShards = Collections.min( + indices.stream().map(sourceIndex -> getNumShards(sourceIndex).numPrimaries).collect(Collectors.toList()) + ); return Math.min(leastNumShards, BulkByScrollParallelizationHelper.AUTO_SLICE_CEILING); } else { return requestSlices; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java index e934d9dd437..7181fa9f4d2 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexVersioningTests.java @@ -38,7 +38,6 @@ import static org.opensearch.action.DocWriteRequest.OpType.CREATE; import static org.opensearch.index.VersionType.EXTERNAL; import static org.opensearch.index.VersionType.INTERNAL; - public class ReindexVersioningTests extends ReindexTestCase { private static final int SOURCE_VERSION = 4; private static final int OLDER_VERSION = 1; @@ -102,7 +101,7 @@ public class ReindexVersioningTests extends ReindexTestCase { * Perform a reindex with EXTERNAL versioning which has "refresh" semantics. */ private BulkByScrollResponse reindexExternal() { - ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); + ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); reindex.destination().setVersionType(EXTERNAL); return reindex.get(); } @@ -111,7 +110,7 @@ public class ReindexVersioningTests extends ReindexTestCase { * Perform a reindex with INTERNAL versioning which has "overwrite" semantics. 
*/ private BulkByScrollResponse reindexInternal() { - ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); + ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); reindex.destination().setVersionType(INTERNAL); return reindex.get(); } @@ -120,22 +119,26 @@ public class ReindexVersioningTests extends ReindexTestCase { * Perform a reindex with CREATE OpType which has "create" semantics. */ private BulkByScrollResponse reindexCreate() { - ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); + ReindexRequestBuilder reindex = reindex().source("source").destination("dest").abortOnVersionConflict(false); reindex.destination().setOpType(CREATE); return reindex.get(); } private void setupSourceAbsent() throws Exception { - indexRandom(true, client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL) - .setVersion(SOURCE_VERSION).setSource("foo", "source")); + indexRandom( + true, + client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL).setVersion(SOURCE_VERSION).setSource("foo", "source") + ); assertEquals(SOURCE_VERSION, client().prepareGet("source", "_doc", "test").get().getVersion()); } private void setupDest(int version) throws Exception { setupSourceAbsent(); - indexRandom(true, client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL) - .setVersion(version).setSource("foo", "dest")); + indexRandom( + true, + client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL).setVersion(version).setSource("foo", "dest") + ); assertEquals(version, client().prepareGet("dest", "_doc", "test").get().getVersion()); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java index ffe53476b41..5341bcd0fee 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestDeleteByQueryActionTests.java @@ -53,8 +53,7 @@ public class RestDeleteByQueryActionTests extends RestActionTestCase { } public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.POST) + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath("/some_index/some_type/_delete_by_query") .build(); @@ -66,7 +65,7 @@ public class RestDeleteByQueryActionTests extends RestActionTestCase { // checks the type in the URL is propagated correctly to the request object // only works after the request is dispatched, so its params are filled from url. 
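        // Illustrative sketch of the flow the comment above describes: the fake request is first
        // dispatched through the registered handler so its path parameters (including the deprecated
        // type segment) are populated, and only then does buildRequest() see them. dispatchRequest()
        // is assumed here to be the RestActionTestCase helper; it is not shown in this patch.
        RestRequest typedDeleteByQuery = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
            .withPath("/some_index/some_type/_delete_by_query")
            .build();
        dispatchRequest(typedDeleteByQuery);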
DeleteByQueryRequest dbqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertArrayEquals(new String[]{"some_type"}, dbqRequest.getDocTypes()); + assertArrayEquals(new String[] { "some_type" }, dbqRequest.getDocTypes()); // RestDeleteByQueryAction itself doesn't check for a deprecated type usage // checking here for a deprecation from its internal search request diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java index c674d35dfa8..508cfefa167 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestReindexActionTests.java @@ -62,12 +62,15 @@ public class RestReindexActionTests extends RestActionTestCase { public void testPipelineQueryParameterIsError() throws IOException { FakeRestRequest.Builder request = new FakeRestRequest.Builder(xContentRegistry()); try (XContentBuilder body = JsonXContent.contentBuilder().prettyPrint()) { - body.startObject(); { - body.startObject("source"); { + body.startObject(); + { + body.startObject("source"); + { body.field("index", "source"); } body.endObject(); - body.startObject("dest"); { + body.startObject("dest"); + { body.field("index", "dest"); } body.endObject(); @@ -76,8 +79,10 @@ public class RestReindexActionTests extends RestActionTestCase { request.withContent(BytesReference.bytes(body), body.contentType()); } request.withParams(singletonMap("pipeline", "doesn't matter")); - Exception e = expectThrows(IllegalArgumentException.class, () -> - action.buildRequest(request.build(), new NamedWriteableRegistry(Collections.emptyList()))); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> action.buildRequest(request.build(), new NamedWriteableRegistry(Collections.emptyList())) + ); assertEquals("_reindex doesn't support [pipeline] as a query parameter. 
Specify it in the [dest] object instead.", e.getMessage()); } @@ -102,9 +107,8 @@ public class RestReindexActionTests extends RestActionTestCase { * test deprecation is logged if one or more types are used in source search request inside reindex */ public void testTypeInSource() throws IOException { - FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.POST) - .withPath("/_reindex"); + FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) + .withPath("/_reindex"); XContentBuilder b = JsonXContent.contentBuilder().startObject(); { b.startObject("source"); @@ -127,9 +131,8 @@ public class RestReindexActionTests extends RestActionTestCase { * test deprecation is logged if a type is used in the destination index request inside reindex */ public void testTypeInDestination() throws IOException { - FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.POST) - .withPath("/_reindex"); + FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST) + .withPath("/_reindex"); XContentBuilder b = JsonXContent.contentBuilder().startObject(); { b.startObject("dest"); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java index e971f33fbe4..743f0e8a852 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RestUpdateByQueryActionTests.java @@ -53,9 +53,8 @@ public class RestUpdateByQueryActionTests extends RestActionTestCase { controller().registerHandler(action); } - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.POST) + public void testTypeInPath() throws IOException { + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) .withPath("/some_index/some_type/_update_by_query") .build(); @@ -67,7 +66,7 @@ public class RestUpdateByQueryActionTests extends RestActionTestCase { // checks the type in the URL is propagated correctly to the request object // only works after the request is dispatched, so its params are filled from url. 
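        // Minimal sketch of the rethrottle call chain that the RethrottleTests changes below
        // reformat: a rethrottle is issued against a known parent task id and any per-node
        // failures are rethrown so the surrounding assertBusy() retries. The task id and the
        // new rate are illustrative placeholders, not values from this patch.
        TaskId taskToRethrottle = new TaskId("node", 123);
        ListTasksResponse rethrottleResponse = rethrottle().setTaskId(taskToRethrottle).setRequestsPerSecond(10.0f).get();
        rethrottleResponse.rethrowFailures("Rethrottle");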
UpdateByQueryRequest ubqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY); - assertArrayEquals(new String[]{"some_type"}, ubqRequest.getDocTypes()); + assertArrayEquals(new String[] { "some_type" }, ubqRequest.getDocTypes()); // RestUpdateByQueryAction itself doesn't check for a deprecated type usage // checking here for a deprecation from its internal search request diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java index b6ca0a6a21d..3f46d621ab8 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RethrottleTests.java @@ -115,12 +115,16 @@ public class RethrottleTests extends ReindexTestCase { assertThat(taskGroupToRethrottle.getChildTasks(), empty()); } else { // There should be a sane number of child tasks running - assertThat(taskGroupToRethrottle.getChildTasks(), - hasSize(allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(numSlices)))); + assertThat(taskGroupToRethrottle.getChildTasks(), hasSize(allOf(greaterThanOrEqualTo(1), lessThanOrEqualTo(numSlices)))); // Wait for all of the sub tasks to start (or finish, some might finish early, all that matters is that not all do) assertBusy(() -> { - BulkByScrollTask.Status parent = (BulkByScrollTask.Status) client().admin().cluster().prepareGetTask(taskToRethrottle).get() - .getTask().getTask().getStatus(); + BulkByScrollTask.Status parent = (BulkByScrollTask.Status) client().admin() + .cluster() + .prepareGetTask(taskToRethrottle) + .get() + .getTask() + .getTask() + .getStatus(); long finishedSubTasks = parent.getSliceStatuses().stream().filter(Objects::nonNull).count(); ListTasksResponse list = client().admin().cluster().prepareListTasks().setParentTaskId(taskToRethrottle).get(); list.rethrowFailures("subtasks"); @@ -141,14 +145,17 @@ public class RethrottleTests extends ReindexTestCase { } else { /* Check that at least one slice was rethrottled. We won't always rethrottle all of them because they might have completed. * With multiple slices these numbers might not add up perfectly, thus the 1.01F. */ - long unfinished = status.getSliceStatuses().stream() - .filter(Objects::nonNull) - .filter(slice -> slice.getStatus().getTotal() > slice.getStatus().getSuccessfullyProcessed()) - .count(); - float maxExpectedSliceRequestsPerSecond = newRequestsPerSecond == Float.POSITIVE_INFINITY ? - Float.POSITIVE_INFINITY : (newRequestsPerSecond / unfinished) * 1.01F; - float minExpectedSliceRequestsPerSecond = newRequestsPerSecond == Float.POSITIVE_INFINITY ? - Float.POSITIVE_INFINITY : (newRequestsPerSecond / numSlices) * 0.99F; + long unfinished = status.getSliceStatuses() + .stream() + .filter(Objects::nonNull) + .filter(slice -> slice.getStatus().getTotal() > slice.getStatus().getSuccessfullyProcessed()) + .count(); + float maxExpectedSliceRequestsPerSecond = newRequestsPerSecond == Float.POSITIVE_INFINITY + ? Float.POSITIVE_INFINITY + : (newRequestsPerSecond / unfinished) * 1.01F; + float minExpectedSliceRequestsPerSecond = newRequestsPerSecond == Float.POSITIVE_INFINITY + ? 
Float.POSITIVE_INFINITY + : (newRequestsPerSecond / numSlices) * 0.99F; boolean oneSliceRethrottled = false; float totalRequestsPerSecond = 0; for (BulkByScrollTask.StatusOrException statusOrException : status.getSliceStatuses()) { @@ -161,11 +168,15 @@ public class RethrottleTests extends ReindexTestCase { BulkByScrollTask.Status slice = statusOrException.getStatus(); if (slice.getTotal() > slice.getSuccessfullyProcessed()) { // This slice reports as not having completed so it should have been processed. - assertThat(slice.getRequestsPerSecond(), both(greaterThanOrEqualTo(minExpectedSliceRequestsPerSecond)) - .and(lessThanOrEqualTo(maxExpectedSliceRequestsPerSecond))); + assertThat( + slice.getRequestsPerSecond(), + both(greaterThanOrEqualTo(minExpectedSliceRequestsPerSecond)).and( + lessThanOrEqualTo(maxExpectedSliceRequestsPerSecond) + ) + ); } if (minExpectedSliceRequestsPerSecond <= slice.getRequestsPerSecond() - && slice.getRequestsPerSecond() <= maxExpectedSliceRequestsPerSecond) { + && slice.getRequestsPerSecond() <= maxExpectedSliceRequestsPerSecond) { oneSliceRethrottled = true; } totalRequestsPerSecond += slice.getRequestsPerSecond(); @@ -185,8 +196,11 @@ public class RethrottleTests extends ReindexTestCase { BulkByScrollResponse response = responseListener.get(); // It'd be bad if the entire require completed in a single batch. The test wouldn't be testing anything. - assertThat("Entire request completed in a single batch. This may invalidate the test as throttling is done between batches.", - response.getBatches(), greaterThanOrEqualTo(numSlices)); + assertThat( + "Entire request completed in a single batch. This may invalidate the test as throttling is done between batches.", + response.getBatches(), + greaterThanOrEqualTo(numSlices) + ); } private ListTasksResponse rethrottleTask(TaskId taskToRethrottle, float newRequestsPerSecond) throws Exception { @@ -196,8 +210,7 @@ public class RethrottleTests extends ReindexTestCase { assertBusy(() -> { try { - ListTasksResponse rethrottleResponse = rethrottle() - .setTaskId(taskToRethrottle) + ListTasksResponse rethrottleResponse = rethrottle().setTaskId(taskToRethrottle) .setRequestsPerSecond(newRequestsPerSecond) .get(); rethrottleResponse.rethrowFailures("Rethrottle"); @@ -209,8 +222,14 @@ public class RethrottleTests extends ReindexTestCase { throw e; } // We want to retry in this case so we throw an assertion error - assertThat(unwrapped.getMessage(), equalTo("task [" + taskToRethrottle.getId() - + "] has not yet been initialized to the point where it knows how to rethrottle itself")); + assertThat( + unwrapped.getMessage(), + equalTo( + "task [" + + taskToRethrottle.getId() + + "] has not yet been initialized to the point where it knows how to rethrottle itself" + ) + ); logger.info("caught unprepared task, retrying until prepared"); throw new AssertionError("Rethrottle request for task [" + taskToRethrottle.getId() + "] failed", e); } @@ -224,8 +243,7 @@ public class RethrottleTests extends ReindexTestCase { do { ListTasksResponse tasks = client().admin().cluster().prepareListTasks().setActions(actionName).setDetailed(true).get(); tasks.rethrowFailures("Finding tasks to rethrottle"); - assertThat("tasks are left over from the last execution of this test", - tasks.getTaskGroups(), hasSize(lessThan(2))); + assertThat("tasks are left over from the last execution of this test", tasks.getTaskGroups(), hasSize(lessThan(2))); if (0 == tasks.getTaskGroups().size()) { // The parent task hasn't started yet continue; @@ -239,11 +257,14 @@ 
public class RethrottleTests extends ReindexTestCase { * (maybe even empty!) that complete super fast so we have to * count them too. */ - long finishedChildStatuses = status.getSliceStatuses().stream() - .filter(n -> n != null) - .count(); - logger.info("Expected [{}] total children, [{}] are running and [{}] are finished\n{}", - sliceCount, taskGroup.getChildTasks().size(), finishedChildStatuses, status.getSliceStatuses()); + long finishedChildStatuses = status.getSliceStatuses().stream().filter(n -> n != null).count(); + logger.info( + "Expected [{}] total children, [{}] are running and [{}] are finished\n{}", + sliceCount, + taskGroup.getChildTasks().size(), + finishedChildStatuses, + status.getSliceStatuses() + ); if (sliceCount == finishedChildStatuses) { fail("all slices finished:\n" + status); } @@ -253,7 +274,8 @@ public class RethrottleTests extends ReindexTestCase { } return taskGroup; } while (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(10)); - throw new AssertionError("Couldn't find tasks to rethrottle. Here are the running tasks " + - client().admin().cluster().prepareListTasks().get()); + throw new AssertionError( + "Couldn't find tasks to rethrottle. Here are the running tasks " + client().admin().cluster().prepareListTasks().get() + ); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java index 61fc3756957..3bbb2b7da93 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java @@ -77,23 +77,19 @@ public class RetryTests extends OpenSearchIntegTestCase { @After public void forceUnblockAllExecutors() { - for (CyclicBarrier barrier: blockedExecutors) { + for (CyclicBarrier barrier : blockedExecutors) { barrier.reset(); } } @Override protected Collection> nodePlugins() { - return Arrays.asList( - ReindexPlugin.class, - Netty4Plugin.class); + return Arrays.asList(ReindexPlugin.class, Netty4Plugin.class); } @Override protected Collection> transportClientPlugins() { - return Arrays.asList( - ReindexPlugin.class, - Netty4Plugin.class); + return Arrays.asList(ReindexPlugin.class, Netty4Plugin.class); } /** @@ -111,16 +107,17 @@ public class RetryTests extends OpenSearchIntegTestCase { final Settings nodeSettings() { return Settings.builder() - // whitelist reindexing from the HTTP host we're going to use - .put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*") - .build(); + // whitelist reindexing from the HTTP host we're going to use + .put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*") + .build(); } public void testReindex() throws Exception { testCase( - ReindexAction.NAME, - client -> new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest"), - matcher().created(DOC_COUNT)); + ReindexAction.NAME, + client -> new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest"), + matcher().created(DOC_COUNT) + ); } public void testReindexFromRemote() throws Exception { @@ -138,51 +135,66 @@ public class RetryTests extends OpenSearchIntegTestCase { assertNotNull(masterNode); TransportAddress address = masterNode.getInfo(HttpInfo.class).getAddress().publishAddress(); - RemoteInfo remote = - new RemoteInfo("http", address.getAddress(), address.getPort(), null, - new BytesArray("{\"match_all\":{}}"), null, null, emptyMap(), - 
RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); - ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest") - .setRemoteInfo(remote); + RemoteInfo remote = new RemoteInfo( + "http", + address.getAddress(), + address.getPort(), + null, + new BytesArray("{\"match_all\":{}}"), + null, + null, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); + ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source") + .destination("dest") + .setRemoteInfo(remote); return request; }; testCase(ReindexAction.NAME, function, matcher().created(DOC_COUNT)); } public void testUpdateByQuery() throws Exception { - testCase(UpdateByQueryAction.NAME, client -> new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE).source("source"), - matcher().updated(DOC_COUNT)); + testCase( + UpdateByQueryAction.NAME, + client -> new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE).source("source"), + matcher().updated(DOC_COUNT) + ); } public void testDeleteByQuery() throws Exception { - testCase(DeleteByQueryAction.NAME, client -> new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE).source("source") - .filter(QueryBuilders.matchAllQuery()), matcher().deleted(DOC_COUNT)); + testCase( + DeleteByQueryAction.NAME, + client -> new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE).source("source") + .filter(QueryBuilders.matchAllQuery()), + matcher().deleted(DOC_COUNT) + ); } private void testCase( - String action, - Function> request, - BulkIndexByScrollResponseMatcher matcher) - throws Exception { + String action, + Function> request, + BulkIndexByScrollResponseMatcher matcher + ) throws Exception { /* * These test cases work by stuffing the bulk queue of a single node and * making sure that we read and write from that node. */ final Settings nodeSettings = Settings.builder() - // use pools of size 1 so we can block them - .put("thread_pool.write.size", 1) - // use queues of size 1 because size 0 is broken and because bulk requests need the queue to function - .put("thread_pool.write.queue_size", 1) - .put("node.attr.color", "blue") - .build(); + // use pools of size 1 so we can block them + .put("thread_pool.write.size", 1) + // use queues of size 1 because size 0 is broken and because bulk requests need the queue to function + .put("thread_pool.write.queue_size", 1) + .put("node.attr.color", "blue") + .build(); final String node = internalCluster().startDataOnlyNode(nodeSettings); - final Settings indexSettings = - Settings.builder() - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) - .put("index.routing.allocation.include.color", "blue") - .build(); + final Settings indexSettings = Settings.builder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.routing.allocation.include.color", "blue") + .build(); // Create the source index on the node with small thread pools so we can block them. 
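        // Hypothetical sketch (not from this patch) of how a test can saturate the single-threaded
        // write pool configured above: a task parked on a CyclicBarrier occupies the one worker, so
        // later bulk requests queue up and, with queue_size 1, get rejected and retried. The barrier
        // bookkeeping mirrors the blockedExecutors reset in forceUnblockAllExecutors(); the
        // getInstance() lookup is the stock internalCluster() helper.
        CyclicBarrier barrier = new CyclicBarrier(2);
        blockedExecutors.add(barrier);
        ThreadPool nodeThreadPool = internalCluster().getInstance(ThreadPool.class, node);
        nodeThreadPool.executor(ThreadPool.Names.WRITE).execute(() -> {
            try {
                barrier.await();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });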
client().admin().indices().prepareCreate("source").setSettings(indexSettings).execute().actionGet(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java index 1d4f83008b7..7b5ffa6a863 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java @@ -76,8 +76,19 @@ public class RoundTripTests extends OpenSearchTestCase { TimeValue socketTimeout = parseTimeValue(randomPositiveTimeValue(), "socketTimeout"); TimeValue connectTimeout = parseTimeValue(randomPositiveTimeValue(), "connectTimeout"); reindex.setRemoteInfo( - new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null, - query, username, password, headers, socketTimeout, connectTimeout)); + new RemoteInfo( + randomAlphaOfLength(5), + randomAlphaOfLength(5), + port, + null, + query, + username, + password, + headers, + socketTimeout, + connectTimeout + ) + ); } ReindexRequest tripped = new ReindexRequest(toInputByteStream(reindex)); assertRequestEquals(reindex, tripped); @@ -176,8 +187,7 @@ public class RoundTripTests extends OpenSearchTestCase { } } - private void assertRequestEquals(AbstractBulkIndexByScrollRequest request, - AbstractBulkIndexByScrollRequest tripped) { + private void assertRequestEquals(AbstractBulkIndexByScrollRequest request, AbstractBulkIndexByScrollRequest tripped) { assertRequestEquals((AbstractBulkByScrollRequest) request, (AbstractBulkByScrollRequest) tripped); assertEquals(request.getScript(), tripped.getScript()); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java index 982c35924ee..4e6d3401a2f 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/TransportRethrottleActionTests.java @@ -77,8 +77,11 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { * @param simulator simulate a response from the sub-request to rethrottle the child requests * @param verifier verify the resulting response */ - private void rethrottleTestCase(int runningSlices, Consumer> simulator, - Consumer> verifier) { + private void rethrottleTestCase( + int runningSlices, + Consumer> simulator, + Consumer> verifier + ) { Client client = mock(Client.class); String localNodeId = randomAlphaOfLength(5); float newRequestsPerSecond = randomValueOtherThanMany(f -> f <= 0, () -> randomFloat()); @@ -103,7 +106,8 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { } private Consumer> expectSuccessfulRethrottleWithStatuses( - List sliceStatuses) { + List sliceStatuses + ) { return listener -> { TaskInfo taskInfo = captureResponse(TaskInfo.class, listener); assertEquals(sliceStatuses, ((BulkByScrollTask.Status) taskInfo.getStatus()).getSliceStatuses()); @@ -115,13 +119,27 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { List sliceStatuses = new ArrayList<>(slices); for (int i = 0; i < slices; i++) { BulkByScrollTask.Status status = believeableInProgressStatus(i); - tasks.add(new TaskInfo(new TaskId("test", 123), "test", "test", "test", status, 0, 0, true, new TaskId("test", task.getId()), - Collections.emptyMap())); + tasks.add( + new TaskInfo( + new 
TaskId("test", 123), + "test", + "test", + "test", + status, + 0, + 0, + true, + new TaskId("test", task.getId()), + Collections.emptyMap() + ) + ); sliceStatuses.add(new BulkByScrollTask.StatusOrException(status)); } - rethrottleTestCase(slices, - listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())), - expectSuccessfulRethrottleWithStatuses(sliceStatuses)); + rethrottleTestCase( + slices, + listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())), + expectSuccessfulRethrottleWithStatuses(sliceStatuses) + ); } public void testRethrottleWithSomeSucceeded() { @@ -129,20 +147,34 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { List sliceStatuses = new ArrayList<>(slices); for (int i = 0; i < succeeded; i++) { BulkByScrollTask.Status status = believeableCompletedStatus(i); - task.getLeaderState().onSliceResponse(neverCalled(), i, - new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), emptyList(), false)); + task.getLeaderState() + .onSliceResponse(neverCalled(), i, new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), emptyList(), false)); sliceStatuses.add(new BulkByScrollTask.StatusOrException(status)); } List tasks = new ArrayList<>(); for (int i = succeeded; i < slices; i++) { BulkByScrollTask.Status status = believeableInProgressStatus(i); - tasks.add(new TaskInfo(new TaskId("test", 123), "test", "test", "test", status, 0, 0, true, new TaskId("test", task.getId()), - Collections.emptyMap())); + tasks.add( + new TaskInfo( + new TaskId("test", 123), + "test", + "test", + "test", + status, + 0, + 0, + true, + new TaskId("test", task.getId()), + Collections.emptyMap() + ) + ); sliceStatuses.add(new BulkByScrollTask.StatusOrException(status)); } - rethrottleTestCase(slices - succeeded, - listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())), - expectSuccessfulRethrottleWithStatuses(sliceStatuses)); + rethrottleTestCase( + slices - succeeded, + listener -> listener.onResponse(new ListTasksResponse(tasks, emptyList(), emptyList())), + expectSuccessfulRethrottleWithStatuses(sliceStatuses) + ); } public void testRethrottleWithAllSucceeded() { @@ -151,17 +183,19 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { @SuppressWarnings("unchecked") ActionListener listener = i < slices - 1 ? neverCalled() : mock(ActionListener.class); BulkByScrollTask.Status status = believeableCompletedStatus(i); - task.getLeaderState().onSliceResponse(listener, i, new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), - emptyList(), false)); + task.getLeaderState() + .onSliceResponse(listener, i, new BulkByScrollResponse(timeValueMillis(10), status, emptyList(), emptyList(), false)); if (i == slices - 1) { // The whole thing succeeded so we should have got the success captureResponse(BulkByScrollResponse.class, listener).getStatus(); } sliceStatuses.add(new BulkByScrollTask.StatusOrException(status)); } - rethrottleTestCase(0, - listener -> { /* There are no async tasks to simulate because the listener is called for us. */}, - expectSuccessfulRethrottleWithStatuses(sliceStatuses)); + rethrottleTestCase( + 0, + listener -> { /* There are no async tasks to simulate because the listener is called for us. 
*/}, + expectSuccessfulRethrottleWithStatuses(sliceStatuses) + ); } private Consumer> expectException(Matcher exceptionMatcher) { @@ -180,16 +214,20 @@ public class TransportRethrottleActionTests extends OpenSearchTestCase { public void testRethrottleTaskOperationFailure() { Exception e = new Exception(); TaskOperationFailure failure = new TaskOperationFailure("test", 123, e); - rethrottleTestCase(slices, - listener -> listener.onResponse(new ListTasksResponse(emptyList(), singletonList(failure), emptyList())), - expectException(hasToString(containsString("Rethrottle of [test:123] failed")))); + rethrottleTestCase( + slices, + listener -> listener.onResponse(new ListTasksResponse(emptyList(), singletonList(failure), emptyList())), + expectException(hasToString(containsString("Rethrottle of [test:123] failed"))) + ); } public void testRethrottleNodeFailure() { FailedNodeException e = new FailedNodeException("test", "test", new Exception()); - rethrottleTestCase(slices, - listener -> listener.onResponse(new ListTasksResponse(emptyList(), emptyList(), singletonList(e))), - expectException(theInstance(e))); + rethrottleTestCase( + slices, + listener -> listener.onResponse(new ListTasksResponse(emptyList(), emptyList(), singletonList(e))), + expectException(theInstance(e)) + ); } private BulkByScrollTask.Status believeableInProgressStatus(Integer sliceId) { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java index c4ac61ecee7..d803eff25d0 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java @@ -48,10 +48,13 @@ import static org.hamcrest.Matchers.hasSize; public class UpdateByQueryBasicTests extends ReindexTestCase { public void testBasics() throws Exception { - indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a"), - client().prepareIndex("test", "test", "2").setSource("foo", "a"), - client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c")); + indexRandom( + true, + client().prepareIndex("test", "test", "1").setSource("foo", "a"), + client().prepareIndex("test", "test", "2").setSource("foo", "a"), + client().prepareIndex("test", "test", "3").setSource("foo", "b"), + client().prepareIndex("test", "test", "4").setSource("foo", "c") + ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4); assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion()); @@ -85,11 +88,13 @@ public class UpdateByQueryBasicTests extends ReindexTestCase { } public void testSlices() throws Exception { - indexRandom(true, + indexRandom( + true, client().prepareIndex("test", "test", "1").setSource("foo", "a"), client().prepareIndex("test", "test", "2").setSource("foo", "a"), client().prepareIndex("test", "test", "3").setSource("foo", "b"), - client().prepareIndex("test", "test", "4").setSource("foo", "c")); + client().prepareIndex("test", "test", "4").setSource("foo", "c") + ); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4); assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion()); @@ 
-99,39 +104,25 @@ public class UpdateByQueryBasicTests extends ReindexTestCase { // Reindex all the docs assertThat( - updateByQuery() - .source("test") - .refresh(true) - .setSlices(slices).get(), - matcher() - .updated(4) - .slices(hasSize(expectedSlices))); + updateByQuery().source("test").refresh(true).setSlices(slices).get(), + matcher().updated(4).slices(hasSize(expectedSlices)) + ); assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); // Now none of them assertThat( - updateByQuery() - .source("test") - .filter(termQuery("foo", "no_match")) - .setSlices(slices) - .refresh(true).get(), - matcher() - .updated(0) - .slices(hasSize(expectedSlices))); + updateByQuery().source("test").filter(termQuery("foo", "no_match")).setSlices(slices).refresh(true).get(), + matcher().updated(0).slices(hasSize(expectedSlices)) + ); assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); // Now half of them assertThat( - updateByQuery() - .source("test") - .filter(termQuery("foo", "a")) - .refresh(true) - .setSlices(slices).get(), - matcher() - .updated(2) - .slices(hasSize(expectedSlices))); + updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(), + matcher().updated(2).slices(hasSize(expectedSlices)) + ); assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion()); @@ -173,8 +164,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase { } public void testMissingSources() { - BulkByScrollResponse response = updateByQuery() - .source("missing-index-*") + BulkByScrollResponse response = updateByQuery().source("missing-index-*") .refresh(true) .setSlices(AbstractBulkByScrollRequest.AUTO_SLICES) .get(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryMetadataTests.java index 0135b5c6f42..93532dceaeb 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryMetadataTests.java @@ -36,8 +36,9 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.cluster.ClusterState; import org.opensearch.index.reindex.ScrollableHitSource.Hit; -public class UpdateByQueryMetadataTests - extends AbstractAsyncBulkByScrollActionMetadataTestCase { +public class UpdateByQueryMetadataTests extends AbstractAsyncBulkByScrollActionMetadataTestCase< + UpdateByQueryRequest, + BulkByScrollResponse> { public void testRoutingIsCopied() { IndexRequest index = new IndexRequest(); @@ -57,13 +58,23 @@ public class UpdateByQueryMetadataTests private class TestAction extends TransportUpdateByQueryAction.AsyncIndexBySearchAction { TestAction() { - super(UpdateByQueryMetadataTests.this.task, UpdateByQueryMetadataTests.this.logger, null, - UpdateByQueryMetadataTests.this.threadPool, null, request(), ClusterState.EMPTY_STATE, listener()); + super( + UpdateByQueryMetadataTests.this.task, + UpdateByQueryMetadataTests.this.logger, + null, + UpdateByQueryMetadataTests.this.threadPool, + null, + request(), + ClusterState.EMPTY_STATE, + listener() + ); } @Override 
- public AbstractAsyncBulkByScrollAction.RequestWrapper copyMetadata(AbstractAsyncBulkByScrollAction.RequestWrapper request, - Hit doc) { + public AbstractAsyncBulkByScrollAction.RequestWrapper copyMetadata( + AbstractAsyncBulkByScrollAction.RequestWrapper request, + Hit doc + ) { return super.copyMetadata(request, doc); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java index d5e1e61f33b..3685fc5f124 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -63,8 +63,10 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { while (keepUpdating.get()) { try { BulkByScrollResponse response = updateByQuery().source("test").refresh(true).abortOnVersionConflict(false).get(); - assertThat(response, matcher().updated(either(equalTo(0L)).or(equalTo(1L))) - .versionConflicts(either(equalTo(0L)).or(equalTo(1L)))); + assertThat( + response, + matcher().updated(either(equalTo(0L)).or(equalTo(1L))).versionConflicts(either(equalTo(0L)).or(equalTo(1L))) + ); } catch (Exception e) { failure.set(e); } @@ -77,8 +79,9 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { GetResponse get = client().prepareGet("test", "test", "test").get(); assertEquals(value.get(), get.getSource().get("test")); value.set(randomSimpleString(random())); - IndexRequestBuilder index = client().prepareIndex("test", "test", "test").setSource("test", value.get()) - .setRefreshPolicy(IMMEDIATE); + IndexRequestBuilder index = client().prepareIndex("test", "test", "test") + .setSource("test", value.get()) + .setRefreshPolicy(IMMEDIATE); /* * Update by query changes the document so concurrent * indexes might get version conflict exceptions so we just @@ -93,10 +96,16 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { } catch (VersionConflictEngineException e) { if (attempts >= MAX_ATTEMPTS) { throw new RuntimeException( - "Failed to index after [" + MAX_ATTEMPTS + "] attempts. Too many version conflicts!"); + "Failed to index after [" + MAX_ATTEMPTS + "] attempts. Too many version conflicts!" + ); } - logger.info("Caught expected version conflict trying to perform mutation number [{}] with version [{}] " - + "on attempt [{}]. Retrying.", i, get.getVersion(), attempts); + logger.info( + "Caught expected version conflict trying to perform mutation number [{}] with version [{}] " + + "on attempt [{}]. 
Retrying.", + i, + get.getVersion(), + attempts + ); get = client().prepareGet("test", "test", "test").get(); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java index 314066d6998..b72f66ce112 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -44,8 +44,9 @@ import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.mock; -public class UpdateByQueryWithScriptTests - extends AbstractAsyncBulkByScrollActionScriptTestCase { +public class UpdateByQueryWithScriptTests extends AbstractAsyncBulkByScrollActionScriptTestCase< + UpdateByQueryRequest, + BulkByScrollResponse> { public void testModifyingCtxNotAllowed() { /* @@ -54,8 +55,8 @@ public class UpdateByQueryWithScriptTests * more. The point of have many is that they should all present the same * error message to the user, not some ClassCastException. */ - Object[] options = new Object[] {"cat", new Object(), 123, new Date(), Math.PI}; - for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_routing"}) { + Object[] options = new Object[] { "cat", new Object(), 123, new Date(), Math.PI }; + for (String ctxVar : new String[] { "_index", "_type", "_id", "_version", "_routing" }) { try { applyScript((Map ctx) -> ctx.put(ctxVar, randomFrom(options))); } catch (IllegalArgumentException e) { @@ -72,9 +73,23 @@ public class UpdateByQueryWithScriptTests @Override protected TransportUpdateByQueryAction.AsyncIndexBySearchAction action(ScriptService scriptService, UpdateByQueryRequest request) { TransportService transportService = mock(TransportService.class); - TransportUpdateByQueryAction transportAction = new TransportUpdateByQueryAction(threadPool, - new ActionFilters(Collections.emptySet()), null, transportService, scriptService, null); - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, scriptService, request, - ClusterState.EMPTY_STATE, listener()); + TransportUpdateByQueryAction transportAction = new TransportUpdateByQueryAction( + threadPool, + new ActionFilters(Collections.emptySet()), + null, + transportService, + scriptService, + null + ); + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction( + task, + logger, + null, + threadPool, + scriptService, + request, + ClusterState.EMPTY_STATE, + listener() + ); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteInfoTests.java index 2d089096b3b..91558963d43 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteInfoTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteInfoTests.java @@ -40,20 +40,37 @@ import static java.util.Collections.emptyMap; public class RemoteInfoTests extends OpenSearchTestCase { private RemoteInfo newRemoteInfo(String scheme, String prefixPath, String username, String password) { - return new RemoteInfo(scheme, "testhost", 12344, prefixPath,new BytesArray("{ \"foo\" : \"bar\" }"), username, password, - emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo( + scheme, + "testhost", + 12344, + prefixPath, + 
new BytesArray("{ \"foo\" : \"bar\" }"), + username, + password, + emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, + RemoteInfo.DEFAULT_CONNECT_TIMEOUT + ); } public void testToString() { - assertEquals("host=testhost port=12344 query={ \"foo\" : \"bar\" }", - newRemoteInfo("http", null, null, null).toString()); - assertEquals("host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser", - newRemoteInfo("http", null, "testuser", null).toString()); - assertEquals("host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser password=<<>>", - newRemoteInfo("http", null, "testuser", "testpass").toString()); - assertEquals("scheme=https host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser password=<<>>", - newRemoteInfo("https", null, "testuser", "testpass").toString()); - assertEquals("scheme=https host=testhost port=12344 pathPrefix=prxy query={ \"foo\" : \"bar\" } username=testuser password=<<>>", - newRemoteInfo("https", "prxy", "testuser", "testpass").toString()); + assertEquals("host=testhost port=12344 query={ \"foo\" : \"bar\" }", newRemoteInfo("http", null, null, null).toString()); + assertEquals( + "host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser", + newRemoteInfo("http", null, "testuser", null).toString() + ); + assertEquals( + "host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser password=<<>>", + newRemoteInfo("http", null, "testuser", "testpass").toString() + ); + assertEquals( + "scheme=https host=testhost port=12344 query={ \"foo\" : \"bar\" } username=testuser password=<<>>", + newRemoteInfo("https", null, "testuser", "testpass").toString() + ); + assertEquals( + "scheme=https host=testhost port=12344 pathPrefix=prxy query={ \"foo\" : \"bar\" } username=testuser password=<<>>", + newRemoteInfo("https", "prxy", "testuser", "testpass").toString() + ); } } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java index ddbfd8ae064..541134f9403 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -91,8 +91,10 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); // test a specific date math + all characters that need escaping. searchRequest.indices("", "<>/{}|+:,"); - assertEquals("/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search", - initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + assertEquals( + "/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search", + initialSearch(searchRequest, query, remoteVersion).getEndpoint() + ); // pass-through if already escaped. 
searchRequest.indices("%2f", "%3a"); @@ -154,8 +156,10 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { // Test request without any fields Version remoteVersion = Version.fromId(between(2000099, Version.CURRENT.id)); - assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), - not(either(hasKey("stored_fields")).or(hasKey("fields")))); + assertThat( + initialSearch(searchRequest, query, remoteVersion).getParameters(), + not(either(hasKey("stored_fields")).or(hasKey("fields"))) + ); // Test stored_fields for versions that support it searchRequest = new SearchRequest().source(new SearchSourceBuilder()); @@ -175,15 +179,16 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { searchRequest = new SearchRequest().source(new SearchSourceBuilder()); searchRequest.source().storedField("_source").storedField("_id"); remoteVersion = Version.fromId(between(0, 2000099 - 1)); - assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), - hasEntry("fields", "_source,_id,_parent,_routing,_ttl")); + assertThat( + initialSearch(searchRequest, query, remoteVersion).getParameters(), + hasEntry("fields", "_source,_id,_parent,_routing,_ttl") + ); // But only versions before 1.0 force _source to be in the list searchRequest = new SearchRequest().source(new SearchSourceBuilder()); searchRequest.source().storedField("_id"); remoteVersion = Version.fromId(between(1000099, 2000099 - 1)); - assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), - hasEntry("fields", "_id,_parent,_routing,_ttl")); + assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), hasEntry("fields", "_id,_parent,_routing,_ttl")); } public void testInitialSearchParamsMisc() { @@ -213,8 +218,7 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { } assertThat(params, hasEntry("size", Integer.toString(size))); if (fetchVersion != null) { - assertThat(params, fetchVersion ? hasEntry("version", Boolean.TRUE.toString()) : - hasEntry("version", Boolean.FALSE.toString())); + assertThat(params, fetchVersion ? 
hasEntry("version", Boolean.TRUE.toString()) : hasEntry("version", Boolean.FALSE.toString())); } else { assertThat(params, hasEntry("version", Boolean.FALSE.toString())); } @@ -231,7 +235,7 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { Map params = initialSearch(searchRequest, query, disallowVersion).getParameters(); assertEquals("false", params.get(allowPartialParamName)); - Version allowVersion = Version.fromId(between(0, v6_3-1)); + Version allowVersion = Version.fromId(between(0, v6_3 - 1)); params = initialSearch(searchRequest, query, allowVersion).getParameters(); assertThat(params.keySet(), not(contains(allowPartialParamName))); } @@ -259,23 +263,31 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { HttpEntity entity = initialSearch(searchRequest, new BytesArray(query), remoteVersion).getEntity(); assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); if (remoteVersion.onOrAfter(Version.fromId(1000099))) { - assertEquals("{\"query\":" + query + ",\"_source\":true}", - Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + assertEquals( + "{\"query\":" + query + ",\"_source\":true}", + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)) + ); } else { - assertEquals("{\"query\":" + query + "}", - Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + assertEquals( + "{\"query\":" + query + "}", + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)) + ); } // Source filtering is included if set up - searchRequest.source().fetchSource(new String[]{"in1", "in2"}, new String[]{"out"}); + searchRequest.source().fetchSource(new String[] { "in1", "in2" }, new String[] { "out" }); entity = initialSearch(searchRequest, new BytesArray(query), remoteVersion).getEntity(); assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); - assertEquals("{\"query\":" + query + ",\"_source\":{\"includes\":[\"in1\",\"in2\"],\"excludes\":[\"out\"]}}", - Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + assertEquals( + "{\"query\":" + query + ",\"_source\":{\"includes\":[\"in1\",\"in2\"],\"excludes\":[\"out\"]}}", + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)) + ); // Invalid XContent fails - RuntimeException e = expectThrows(RuntimeException.class, - () -> initialSearch(searchRequest, new BytesArray("{}, \"trailing\": {}"), remoteVersion)); + RuntimeException e = expectThrows( + RuntimeException.class, + () -> initialSearch(searchRequest, new BytesArray("{}, \"trailing\": {}"), remoteVersion) + ); assertThat(e.getCause().getMessage(), containsString("Unexpected character (',' (code 44))")); e = expectThrows(RuntimeException.class, () -> initialSearch(searchRequest, new BytesArray("{"), remoteVersion)); assertThat(e.getCause().getMessage(), containsString("Unexpected end-of-input")); @@ -292,8 +304,10 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { String scroll = randomAlphaOfLength(30); HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromString("5.0.0")).getEntity(); assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); - assertThat(Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), - containsString("\"" + scroll + "\"")); + 
assertThat( + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), + containsString("\"" + scroll + "\"") + ); // Test with version < 2.0.0 entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromId(1070499)).getEntity(); @@ -305,8 +319,10 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase { String scroll = randomAlphaOfLength(30); Request request = clearScroll(scroll, Version.fromString("5.0.0")); assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType().getValue()); - assertThat(Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)), - containsString("\"" + scroll + "\"")); + assertThat( + Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)), + containsString("\"" + scroll + "\"") + ); assertThat(request.getParameters().keySet(), empty()); // Test with version < 2.0.0 diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 5bc92addae6..ff97b9a301a 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -162,10 +162,9 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { private void assertLookupRemoteVersion(Version expected, String s) throws Exception { AtomicBoolean called = new AtomicBoolean(); - sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, s) - .lookupRemoteVersion(wrapAsListener(v -> { - assertEquals(expected, v); - called.set(true); + sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, s).lookupRemoteVersion(wrapAsListener(v -> { + assertEquals(expected, v); + called.set(true); })); assertTrue(called.get()); } @@ -269,10 +268,13 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { assertEquals((Integer) 0, r.getFailures().get(0).getShardId()); assertEquals("87A7NvevQxSrEwMbtRCecg", r.getFailures().get(0).getNodeId()); assertThat(r.getFailures().get(0).getReason(), instanceOf(OpenSearchRejectedExecutionException.class)); - assertEquals("rejected execution of org.opensearch.transport.TransportService$5@52d06af2 on " + assertEquals( + "rejected execution of org.opensearch.transport.TransportService$5@52d06af2 on " + "OpenSearchThreadPoolExecutor[search, queue capacity = 1000, org.opensearch.common.util.concurrent." 
+ "OpenSearchThreadPoolExecutor@778ea553[Running, pool size = 7, active threads = 7, queued tasks = 1000, " - + "completed tasks = 4182]]", r.getFailures().get(0).getReason().getMessage()); + + "completed tasks = 4182]]", + r.getFailures().get(0).getReason().getMessage() + ); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); assertEquals("test", r.getHits().get(0).getType()); @@ -300,8 +302,10 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { assertEquals(null, r.getFailures().get(0).getShardId()); assertEquals(null, r.getFailures().get(0).getNodeId()); assertThat(r.getFailures().get(0).getReason(), instanceOf(RuntimeException.class)); - assertEquals("Unknown remote exception with reason=[SearchContextMissingException[No search context found for id [82]]]", - r.getFailures().get(0).getReason().getMessage()); + assertEquals( + "Unknown remote exception with reason=[SearchContextMissingException[No search context found for id [82]]]", + r.getFailures().get(0).getReason().getMessage() + ); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); assertEquals("test", r.getHits().get(0).getType()); @@ -312,8 +316,11 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { sourceWithMockedRemoteCall("failure_with_status.json").doStart(wrapAsListener(checkResponse)); assertTrue(called.get()); called.set(false); - sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll("scroll", timeValueMillis(0), - wrapAsListener(checkResponse)); + sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll( + "scroll", + timeValueMillis(0), + wrapAsListener(checkResponse) + ); assertTrue(called.get()); } @@ -443,8 +450,14 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { public void testTooLargeResponse() throws Exception { ContentTooLongException tooLong = new ContentTooLongException("too long!"); CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); - when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), any(FutureCallback.class))).then(new Answer>() { + when( + httpClient.execute( + any(HttpAsyncRequestProducer.class), + any(HttpAsyncResponseConsumer.class), + any(HttpClientContext.class), + any(FutureCallback.class) + ) + ).then(new Answer>() { @Override public Future answer(InvocationOnMock invocationOnMock) throws Throwable { HeapBufferedAsyncResponseConsumer consumer = (HeapBufferedAsyncResponseConsumer) invocationOnMock.getArguments()[1]; @@ -469,22 +482,29 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { } public void testNoContentTypeIsError() { - RuntimeException e = expectListenerFailure(RuntimeException.class, (RejectAwareActionListener listener) -> - sourceWithMockedRemoteCall(false, null, "main/0_20_5.json").lookupRemoteVersion(listener)); + RuntimeException e = expectListenerFailure( + RuntimeException.class, + (RejectAwareActionListener listener) -> sourceWithMockedRemoteCall(false, null, "main/0_20_5.json") + .lookupRemoteVersion(listener) + ); assertEquals(e.getMessage(), "Response didn't include supported Content-Type, remote is likely not an OpenSearch instance"); } public void testInvalidJsonThinksRemoteIsNotES() throws IOException { Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("some_text.txt").start()); - assertEquals("Error parsing the response, remote is 
likely not an OpenSearch instance", - e.getCause().getCause().getCause().getMessage()); + assertEquals( + "Error parsing the response, remote is likely not an OpenSearch instance", + e.getCause().getCause().getCause().getMessage() + ); } public void testUnexpectedJsonThinksRemoteIsNotES() throws IOException { // Use the response from a main action instead of a proper start response to generate a parse error Exception e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall("main/2_3_3.json").start()); - assertEquals("Error parsing the response, remote is likely not an OpenSearch instance", - e.getCause().getCause().getCause().getMessage()); + assertEquals( + "Error parsing the response, remote is likely not an OpenSearch instance", + e.getCause().getCause().getCause().getMessage() + ); } public void testCleanupSuccessful() throws Exception { @@ -525,8 +545,14 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { } CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); - when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer>() { + when( + httpClient.execute( + any(HttpAsyncRequestProducer.class), + any(HttpAsyncResponseConsumer.class), + any(HttpClientContext.class), + any(FutureCallback.class) + ) + ).thenAnswer(new Answer>() { int responseCount = 0; @@ -536,7 +562,7 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { threadPool.getThreadContext().stashContext(); HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; - HttpEntityEnclosingRequest request = (HttpEntityEnclosingRequest)requestProducer.generateRequest(); + HttpEntityEnclosingRequest request = (HttpEntityEnclosingRequest) requestProducer.generateRequest(); URL resource = resources[responseCount]; String path = paths[responseCount++]; ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1); @@ -566,7 +592,8 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { when(clientBuilder.build()).thenReturn(httpClient); RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)) - .setHttpClientConfigCallback(httpClientBuilder -> clientBuilder).build(); + .setHttpClientConfigCallback(httpClientBuilder -> clientBuilder) + .build(); TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(restClient) { @Override @@ -598,30 +625,32 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase { private class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { TestRemoteScrollableHitSource(RestClient client) { - super(RemoteScrollableHitSourceTests.this.logger, backoff(), RemoteScrollableHitSourceTests.this.threadPool, + super( + RemoteScrollableHitSourceTests.this.logger, + backoff(), + RemoteScrollableHitSourceTests.this.threadPool, RemoteScrollableHitSourceTests.this::countRetry, - responseQueue::add, RemoteScrollableHitSourceTests.this::failRequest, - client, new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); + responseQueue::add, + RemoteScrollableHitSourceTests.this::failRequest, + client, + new BytesArray("{}"), + RemoteScrollableHitSourceTests.this.searchRequest + ); } } private RejectAwareActionListener wrapAsListener(Consumer consumer) { - Consumer throwing = e -> { - throw 
new AssertionError(e); - }; + Consumer throwing = e -> { throw new AssertionError(e); }; return RejectAwareActionListener.wrap(consumer::accept, throwing, throwing); } @SuppressWarnings("unchecked") private T expectListenerFailure(Class expectedException, Consumer> subject) { AtomicReference exception = new AtomicReference<>(); - subject.accept(RejectAwareActionListener.wrap( - r -> fail(), - e -> { - assertThat(e, instanceOf(expectedException)); - assertTrue(exception.compareAndSet(null, (T) e)); - }, - e -> fail())); + subject.accept(RejectAwareActionListener.wrap(r -> fail(), e -> { + assertThat(e, instanceOf(expectedException)); + assertTrue(exception.compareAndSet(null, (T) e)); + }, e -> fail())); assertNotNull(exception.get()); return exception.get(); } diff --git a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java index 80783960841..aa274549f3a 100644 --- a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java +++ b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java @@ -67,11 +67,18 @@ public class URLSnapshotRestoreIT extends OpenSearchIntegTestCase { logger.info("--> creating repository"); Path repositoryLocation = randomRepoPath(); - assertAcked(client.admin().cluster().preparePutRepository("test-repo") - .setType(FsRepository.TYPE).setSettings(Settings.builder() - .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation) - .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean()) - .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); + assertAcked( + client.admin() + .cluster() + .preparePutRepository("test-repo") + .setType(FsRepository.TYPE) + .setSettings( + Settings.builder() + .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation) + .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean()) + .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + ) + ); createIndex("test-idx"); ensureGreen(); @@ -84,8 +91,7 @@ public class URLSnapshotRestoreIT extends OpenSearchIntegTestCase { assertThat(client.prepareSearch("test-idx").setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client - .admin() + CreateSnapshotResponse createSnapshotResponse = client.admin() .cluster() .prepareCreateSnapshot("test-repo", "test-snap") .setWaitForCompletion(true) @@ -95,8 +101,7 @@ public class URLSnapshotRestoreIT extends OpenSearchIntegTestCase { int actualTotalShards = createSnapshotResponse.getSnapshotInfo().totalShards(); assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(actualTotalShards)); - SnapshotState state = client - .admin() + SnapshotState state = client.admin() .cluster() .prepareGetSnapshots("test-repo") .setSnapshots("test-snap") @@ -110,13 +115,19 @@ public class URLSnapshotRestoreIT extends OpenSearchIntegTestCase { cluster().wipeIndices("test-idx"); logger.info("--> create read-only URL repository"); - assertAcked(client.admin().cluster().preparePutRepository("url-repo") - .setType(URLRepository.TYPE).setSettings(Settings.builder() - .put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString()) - .put("list_directories", randomBoolean()))); 
+ assertAcked( + client.admin() + .cluster() + .preparePutRepository("url-repo") + .setType(URLRepository.TYPE) + .setSettings( + Settings.builder() + .put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString()) + .put("list_directories", randomBoolean()) + ) + ); logger.info("--> restore index after deletion"); - RestoreSnapshotResponse restoreSnapshotResponse = client - .admin() + RestoreSnapshotResponse restoreSnapshotResponse = client.admin() .cluster() .prepareRestoreSnapshot("url-repo", "test-snap") .setWaitForCompletion(true) diff --git a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java index 0f74dcc1cdf..fbfbf5e006f 100644 --- a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java +++ b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java @@ -66,8 +66,8 @@ public class URLBlobStore implements BlobStore { */ public URLBlobStore(Settings settings, URL path) { this.path = path; - this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.uri.buffer_size", - new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes(); + this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.uri.buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)) + .getBytes(); } @Override diff --git a/modules/repository-url/src/main/java/org/opensearch/plugin/repository/url/URLRepositoryPlugin.java b/modules/repository-url/src/main/java/org/opensearch/plugin/repository/url/URLRepositoryPlugin.java index b278f562cba..d1b08d44380 100644 --- a/modules/repository-url/src/main/java/org/opensearch/plugin/repository/url/URLRepositoryPlugin.java +++ b/modules/repository-url/src/main/java/org/opensearch/plugin/repository/url/URLRepositoryPlugin.java @@ -59,9 +59,15 @@ public class URLRepositoryPlugin extends Plugin implements RepositoryPlugin { } @Override - public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry, - ClusterService clusterService, RecoverySettings recoverySettings) { - return Collections.singletonMap(URLRepository.TYPE, - metadata -> new URLRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings)); + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + RecoverySettings recoverySettings + ) { + return Collections.singletonMap( + URLRepository.TYPE, + metadata -> new URLRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings) + ); } } diff --git a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java index 5c926fc3c51..041550b70a6 100644 --- a/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/opensearch/repositories/url/URLRepository.java @@ -71,17 +71,27 @@ public class URLRepository extends BlobStoreRepository { public static final String TYPE = "url"; - public static final Setting> SUPPORTED_PROTOCOLS_SETTING = - Setting.listSetting("repositories.url.supported_protocols", Arrays.asList("http", "https", "ftp", "file", "jar"), - Function.identity(), Property.NodeScope); + public static final Setting> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting( + "repositories.url.supported_protocols", + 
Arrays.asList("http", "https", "ftp", "file", "jar"), + Function.identity(), + Property.NodeScope + ); - public static final Setting> ALLOWED_URLS_SETTING = - Setting.listSetting("repositories.url.allowed_urls", Collections.emptyList(), URIPattern::new, Property.NodeScope); + public static final Setting> ALLOWED_URLS_SETTING = Setting.listSetting( + "repositories.url.allowed_urls", + Collections.emptyList(), + URIPattern::new, + Property.NodeScope + ); public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, Property.NodeScope); - public static final Setting REPOSITORIES_URL_SETTING = - new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), URLRepository::parseURL, - Property.NodeScope); + public static final Setting REPOSITORIES_URL_SETTING = new Setting<>( + "repositories.url.url", + (s) -> s.get("repositories.uri.url", "http:"), + URLRepository::parseURL, + Property.NodeScope + ); private final List supportedProtocols; @@ -96,20 +106,25 @@ public class URLRepository extends BlobStoreRepository { /** * Constructs a read-only URL-based repository */ - public URLRepository(RepositoryMetadata metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry, ClusterService clusterService, - RecoverySettings recoverySettings) { + public URLRepository( + RepositoryMetadata metadata, + Environment environment, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + RecoverySettings recoverySettings + ) { super(metadata, false, namedXContentRegistry, clusterService, recoverySettings); - if (URL_SETTING.exists(metadata.settings()) == false && REPOSITORIES_URL_SETTING.exists(environment.settings()) == false) { + if (URL_SETTING.exists(metadata.settings()) == false && REPOSITORIES_URL_SETTING.exists(environment.settings()) == false) { throw new RepositoryException(metadata.name(), "missing url"); } this.environment = environment; supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(environment.settings()); - urlWhiteList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[]{}); + urlWhiteList = ALLOWED_URLS_SETTING.get(environment.settings()).toArray(new URIPattern[] {}); basePath = BlobPath.cleanPath(); url = URL_SETTING.exists(metadata.settings()) - ? URL_SETTING.get(metadata.settings()) : REPOSITORIES_URL_SETTING.get(environment.settings()); + ? 
URL_SETTING.get(metadata.settings()) + : REPOSITORIES_URL_SETTING.get(environment.settings()); } @Override @@ -157,10 +172,12 @@ public class URLRepository extends BlobStoreRepository { // We didn't match white list - try to resolve against path.repo URL normalizedUrl = environment.resolveRepoURL(url); if (normalizedUrl == null) { - String logMessage = "The specified url [{}] doesn't start with any repository paths specified by the " + - "path.repo setting or by {} setting: [{}] "; + String logMessage = "The specified url [{}] doesn't start with any repository paths specified by the " + + "path.repo setting or by {} setting: [{}] "; logger.warn(logMessage, url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); - String exceptionMessage = "file url [" + url + "] doesn't match any of the locations specified by path.repo or " + String exceptionMessage = "file url [" + + url + + "] doesn't match any of the locations specified by path.repo or " + ALLOWED_URLS_SETTING.getKey(); throw new RepositoryException(getMetadata().name(), exceptionMessage); } diff --git a/modules/repository-url/src/test/java/org/opensearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/opensearch/repositories/url/URLRepositoryTests.java index aebca8549ea..30d1c81fc9e 100644 --- a/modules/repository-url/src/test/java/org/opensearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/opensearch/repositories/url/URLRepositoryTests.java @@ -54,9 +54,13 @@ import static org.hamcrest.CoreMatchers.nullValue; public class URLRepositoryTests extends OpenSearchTestCase { private URLRepository createRepository(Settings baseSettings, RepositoryMetadata repositoryMetadata) { - return new URLRepository(repositoryMetadata, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList()), BlobStoreTestUtil.mockClusterService(), - new RecoverySettings(baseSettings, new ClusterSettings(baseSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { + return new URLRepository( + repositoryMetadata, + TestEnvironment.newEnvironment(baseSettings), + new NamedXContentRegistry(Collections.emptyList()), + BlobStoreTestUtil.mockClusterService(), + new RecoverySettings(baseSettings, new ClusterSettings(baseSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) + ) { @Override protected void assertSnapshotOrGenericThread() { // eliminate thread name check as we create repo manually on test/main threads @@ -93,7 +97,8 @@ public class URLRepositoryTests extends OpenSearchTestCase { repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { - String msg = "[url] file url [" + repoPath + String msg = "[url] file url [" + + repoPath + "] doesn't match any of the locations specified by path.repo or repositories.url.allowed_urls"; assertEquals(msg, e.getMessage()); } @@ -115,7 +120,7 @@ public class URLRepositoryTests extends OpenSearchTestCase { repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { - assertEquals("[url] unsupported url protocol [file] from URL [" + repoPath +"]", e.getMessage()); + assertEquals("[url] unsupported url protocol [file] from URL [" + repoPath + "]", e.getMessage()); } } @@ -123,7 +128,7 @@ public class URLRepositoryTests extends OpenSearchTestCase { Settings baseSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) 
.put(URLRepository.ALLOWED_URLS_SETTING.getKey(), "file:/tmp/") - .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), "file:/var/" ) + .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), "file:/var/") .build(); RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings); final URLRepository repository = createRepository(baseSettings, repositoryMetadata); @@ -132,9 +137,11 @@ public class URLRepositoryTests extends OpenSearchTestCase { repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { - assertEquals("[url] file url [file:/var/] doesn't match any of the locations " - + "specified by path.repo or repositories.url.allowed_urls", - e.getMessage()); + assertEquals( + "[url] file url [file:/var/] doesn't match any of the locations " + + "specified by path.repo or repositories.url.allowed_urls", + e.getMessage() + ); } } diff --git a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 7ff30b632f5..3d0c09fb228 100644 --- a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -99,15 +99,17 @@ public class RepositoryURLClientYamlTestSuiteIT extends OpenSearchClientYamlSuit // Create a FS repository using the path.repo location Request createFsRepositoryRequest = new Request("PUT", "/_snapshot/repository-fs"); - createFsRepositoryRequest.setEntity(buildRepositorySettings(FsRepository.TYPE, - Settings.builder().put("location", pathRepo).build())); + createFsRepositoryRequest.setEntity( + buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build()) + ); Response createFsRepositoryResponse = client().performRequest(createFsRepositoryRequest); assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); // Create a URL repository using the file://{path.repo} URL Request createFileRepositoryRequest = new Request("PUT", "/_snapshot/repository-file"); - createFileRepositoryRequest.setEntity(buildRepositorySettings("url", - Settings.builder().put("url", pathRepoUri.toString()).build())); + createFileRepositoryRequest.setEntity( + buildRepositorySettings("url", Settings.builder().put("url", pathRepoUri.toString()).build()) + ); Response createFileRepositoryResponse = client().performRequest(createFileRepositoryRequest); assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); @@ -119,8 +121,7 @@ public class RepositoryURLClientYamlTestSuiteIT extends OpenSearchClientYamlSuit InetAddress inetAddress = InetAddress.getByName(new URL(allowedUrl).getHost()); if (inetAddress.isAnyLocalAddress() || inetAddress.isLoopbackAddress()) { Request createUrlRepositoryRequest = new Request("PUT", "/_snapshot/repository-url"); - createUrlRepositoryRequest.setEntity(buildRepositorySettings("url", - Settings.builder().put("url", allowedUrl).build())); + createUrlRepositoryRequest.setEntity(buildRepositorySettings("url", Settings.builder().put("url", allowedUrl).build())); Response createUrlRepositoryResponse = client().performRequest(createUrlRepositoryRequest); 
assertThat(createUrlRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); break; diff --git a/modules/systemd/src/main/java/org/opensearch/systemd/SystemdPlugin.java b/modules/systemd/src/main/java/org/opensearch/systemd/SystemdPlugin.java index fa1ce73fc07..3a2d51950f0 100644 --- a/modules/systemd/src/main/java/org/opensearch/systemd/SystemdPlugin.java +++ b/modules/systemd/src/main/java/org/opensearch/systemd/SystemdPlugin.java @@ -111,7 +111,8 @@ public class SystemdPlugin extends Plugin implements ClusterPlugin { final NodeEnvironment nodeEnvironment, final NamedWriteableRegistry namedWriteableRegistry, final IndexNameExpressionResolver expressionResolver, - final Supplier repositoriesServiceSupplier) { + final Supplier repositoriesServiceSupplier + ) { if (enabled == false) { extender.set(null); return Collections.emptyList(); @@ -123,15 +124,12 @@ public class SystemdPlugin extends Plugin implements ClusterPlugin { * Therefore, every fifteen seconds we send systemd a message via sd_notify to extend the timeout by thirty seconds. We will cancel * this scheduled task after we successfully notify systemd that we are ready. */ - extender.set(threadPool.scheduleWithFixedDelay( - () -> { - final int rc = sd_notify(0, "EXTEND_TIMEOUT_USEC=30000000"); - if (rc < 0) { - logger.warn("extending startup timeout via sd_notify failed with [{}]", rc); - } - }, - TimeValue.timeValueSeconds(15), - ThreadPool.Names.SAME)); + extender.set(threadPool.scheduleWithFixedDelay(() -> { + final int rc = sd_notify(0, "EXTEND_TIMEOUT_USEC=30000000"); + if (rc < 0) { + logger.warn("extending startup timeout via sd_notify failed with [{}]", rc); + } + }, TimeValue.timeValueSeconds(15), ThreadPool.Names.SAME)); return Collections.emptyList(); } diff --git a/modules/systemd/src/test/java/org/opensearch/systemd/SystemdPluginTests.java b/modules/systemd/src/test/java/org/opensearch/systemd/SystemdPluginTests.java index 8c54d2c39b3..d55838dc9a1 100644 --- a/modules/systemd/src/test/java/org/opensearch/systemd/SystemdPluginTests.java +++ b/modules/systemd/src/test/java/org/opensearch/systemd/SystemdPluginTests.java @@ -60,8 +60,10 @@ import static org.mockito.Mockito.when; public class SystemdPluginTests extends OpenSearchTestCase { private final Build.Type randomPackageBuildType = randomFrom(Build.Type.DEB, Build.Type.RPM); - private final Build.Type randomNonPackageBuildType = - randomValueOtherThanMany(t -> t == Build.Type.DEB || t == Build.Type.RPM, () -> randomFrom(Build.Type.values())); + private final Build.Type randomNonPackageBuildType = randomValueOtherThanMany( + t -> t == Build.Type.DEB || t == Build.Type.RPM, + () -> randomFrom(Build.Type.values()) + ); final Scheduler.Cancellable extender = mock(Scheduler.Cancellable.class); final ThreadPool threadPool = mock(ThreadPool.class); @@ -103,46 +105,38 @@ public class SystemdPluginTests extends OpenSearchTestCase { public void testInvalid() { final String esSDNotify = randomValueOtherThanMany( s -> Boolean.TRUE.toString().equals(s) || Boolean.FALSE.toString().equals(s), - () -> randomAlphaOfLength(4)); - final RuntimeException e = expectThrows(RuntimeException.class, - () -> new SystemdPlugin(false, randomPackageBuildType, esSDNotify)); + () -> randomAlphaOfLength(4) + ); + final RuntimeException e = expectThrows(RuntimeException.class, () -> new SystemdPlugin(false, randomPackageBuildType, esSDNotify)); assertThat(e, hasToString(containsString("OPENSEARCH_SD_NOTIFY set to unexpected value [" + esSDNotify + "]"))); } public 
void testOnNodeStartedSuccess() { - runTestOnNodeStarted( - Boolean.TRUE.toString(), - randomIntBetween(0, Integer.MAX_VALUE), - (maybe, plugin) -> { - assertThat(maybe, OptionalMatchers.isEmpty()); - verify(plugin.extender()).cancel(); - }); + runTestOnNodeStarted(Boolean.TRUE.toString(), randomIntBetween(0, Integer.MAX_VALUE), (maybe, plugin) -> { + assertThat(maybe, OptionalMatchers.isEmpty()); + verify(plugin.extender()).cancel(); + }); } public void testOnNodeStartedFailure() { final int rc = randomIntBetween(Integer.MIN_VALUE, -1); - runTestOnNodeStarted( - Boolean.TRUE.toString(), - rc, - (maybe, plugin) -> { - assertThat(maybe, OptionalMatchers.isPresent()); - // noinspection OptionalGetWithoutIsPresent - assertThat(maybe.get(), instanceOf(RuntimeException.class)); - assertThat(maybe.get(), hasToString(containsString("sd_notify returned error [" + rc + "]"))); - }); + runTestOnNodeStarted(Boolean.TRUE.toString(), rc, (maybe, plugin) -> { + assertThat(maybe, OptionalMatchers.isPresent()); + // noinspection OptionalGetWithoutIsPresent + assertThat(maybe.get(), instanceOf(RuntimeException.class)); + assertThat(maybe.get(), hasToString(containsString("sd_notify returned error [" + rc + "]"))); + }); } public void testOnNodeStartedNotEnabled() { - runTestOnNodeStarted( - Boolean.FALSE.toString(), - randomInt(), - (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); + runTestOnNodeStarted(Boolean.FALSE.toString(), randomInt(), (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); } private void runTestOnNodeStarted( final String esSDNotify, final int rc, - final BiConsumer, SystemdPlugin> assertions) { + final BiConsumer, SystemdPlugin> assertions + ) { runTest(esSDNotify, rc, assertions, SystemdPlugin::onNodeStarted, "READY=1"); } @@ -150,27 +144,23 @@ public class SystemdPluginTests extends OpenSearchTestCase { runTestClose( Boolean.TRUE.toString(), randomIntBetween(1, Integer.MAX_VALUE), - (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); + (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty()) + ); } public void testCloseFailure() { runTestClose( Boolean.TRUE.toString(), randomIntBetween(Integer.MIN_VALUE, -1), - (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); + (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty()) + ); } public void testCloseNotEnabled() { - runTestClose( - Boolean.FALSE.toString(), - randomInt(), - (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); + runTestClose(Boolean.FALSE.toString(), randomInt(), (maybe, plugin) -> assertThat(maybe, OptionalMatchers.isEmpty())); } - private void runTestClose( - final String esSDNotify, - final int rc, - final BiConsumer, SystemdPlugin> assertions) { + private void runTestClose(final String esSDNotify, final int rc, final BiConsumer, SystemdPlugin> assertions) { runTest(esSDNotify, rc, assertions, SystemdPlugin::close, "STOPPING=1"); } @@ -179,7 +169,8 @@ public class SystemdPluginTests extends OpenSearchTestCase { final int rc, final BiConsumer, SystemdPlugin> assertions, final CheckedConsumer invocation, - final String expectedState) { + final String expectedState + ) { final AtomicBoolean invoked = new AtomicBoolean(); final AtomicInteger invokedUnsetEnvironment = new AtomicInteger(); final AtomicReference invokedState = new AtomicReference<>(); diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/OpenSearchNetty4IntegTestCase.java 
b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/OpenSearchNetty4IntegTestCase.java index a8d7fa7ff5b..86e26ec15c3 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/OpenSearchNetty4IntegTestCase.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/OpenSearchNetty4IntegTestCase.java @@ -69,7 +69,7 @@ public abstract class OpenSearchNetty4IntegTestCase extends OpenSearchIntegTestC protected Settings transportClientSettings() { Settings.Builder builder = Settings.builder().put(super.transportClientSettings()); builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME); - return builder.build(); + return builder.build(); } @Override diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java index a348ee4aa5a..f8711ce5ff6 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -124,8 +124,7 @@ public class Netty4HttpRequestSizeLimitIT extends OpenSearchNetty4IntegTestCase List> requestUris = new ArrayList<>(); for (int i = 0; i < 1500; i++) { - requestUris.add(Tuple.tuple("/_cluster/settings", - "{ \"transient\": {\"search.default_search_timeout\": \"40s\" } }")); + requestUris.add(Tuple.tuple("/_cluster/settings", "{ \"transient\": {\"search.default_search_timeout\": \"40s\" } }")); } HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); @@ -149,8 +148,11 @@ public class Netty4HttpRequestSizeLimitIT extends OpenSearchNetty4IntegTestCase private void assertAllInExpectedStatus(Collection responses, HttpResponseStatus expectedStatus) { long countUnexpectedStatus = responses.stream().filter(r -> r.status().equals(expectedStatus) == false).count(); - assertThat("Expected all requests with status [" + expectedStatus + "] but [" + countUnexpectedStatus + - "] requests had a different one", countUnexpectedStatus, equalTo(0L)); + assertThat( + "Expected all requests with status [" + expectedStatus + "] but [" + countUnexpectedStatus + "] requests had a different one", + countUnexpectedStatus, + equalTo(0L) + ); } } diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java index e6b0a043a95..2bd1fa07f8a 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java @@ -55,7 +55,7 @@ public class Netty4PipeliningIT extends OpenSearchNetty4IntegTestCase { } public void testThatNettyHttpServerSupportsPipelining() throws Exception { - String[] requests = new String[]{"/", "/_nodes/stats", "/", "/_cluster/state", "/"}; + String[] requests = new String[] { "/", "/_nodes/stats", "/", "/_cluster/state", "/" }; HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); diff --git 
a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java index 5397a4a040c..eb70015d882 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java @@ -60,8 +60,7 @@ import static org.hamcrest.core.Is.is; // These tests are here today so they have access to a proper REST client. They cannot be in :server:integTest since the REST client needs a // proper transport implementation, and they cannot be REST tests today since they need to restart nodes. When #35599 and friends land we // should be able to move these tests to run against a proper cluster instead. TODO do this. -@OpenSearchIntegTestCase.ClusterScope( - scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoManageMasterNodes = false) +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoManageMasterNodes = false) public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { @Override @@ -72,12 +71,14 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { public void testRollingRestartOfTwoNodeCluster() throws Exception { internalCluster().setBootstrapMasterNodeIndex(1); final List nodes = internalCluster().startNodes(2); - createIndex("test", + createIndex( + "test", Settings.builder() .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.ZERO) // assign shards .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) // causes rebalancing .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .build()); + .build() + ); ensureGreen("test"); RestClient restClient = getRestClient(); @@ -86,9 +87,9 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { @Override public void doAfterNodes(int n, Client client) throws IOException { ensureGreen("test"); - Response response = - restClient.performRequest(new Request("POST", "/_cluster/voting_config_exclusions/" + - internalCluster().getNodeNames()[n])); + Response response = restClient.performRequest( + new Request("POST", "/_cluster/voting_config_exclusions/" + internalCluster().getNodeNames()[n]) + ); assertThat(response.getStatusLine().getStatusCode(), is(200)); } @@ -103,7 +104,9 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { new Node( HttpHost.create( internalCluster().getInstance(HttpServerTransport.class, viaNode) - .boundAddress().publishAddress().toString() + .boundAddress() + .publishAddress() + .toString() ) ) ) @@ -111,7 +114,9 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { Response deleteResponse = restClient.performRequest(new Request("DELETE", "/_cluster/voting_config_exclusions")); assertThat(deleteResponse.getStatusLine().getStatusCode(), is(200)); - ClusterHealthResponse clusterHealthResponse = client(viaNode).admin().cluster().prepareHealth() + ClusterHealthResponse clusterHealthResponse = client(viaNode).admin() + .cluster() + .prepareHealth() .setWaitForEvents(Priority.LANGUID) .setWaitForNodes(Integer.toString(1)) .setTimeout(TimeValue.timeValueSeconds(30L)) @@ -138,7 +143,8 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { assertThat(response.getStatusLine().getStatusCode(), is(200)); 
assertThat(response.getEntity().getContentLength(), is(0L)); Response deleteResponse = restClient.performRequest( - new Request("DELETE", "/_cluster/voting_config_exclusions/?wait_for_removal=false")); + new Request("DELETE", "/_cluster/voting_config_exclusions/?wait_for_removal=false") + ); assertThat(deleteResponse.getStatusLine().getStatusCode(), is(200)); assertThat(deleteResponse.getEntity().getContentLength(), is(0L)); } @@ -180,8 +186,9 @@ public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase { List nodes = internalCluster().startNodes(3); ensureStableCluster(3); RestClient restClient = getRestClient(); - Response response = restClient.performRequest(new Request("POST", "/_cluster/voting_config_exclusions/" + - nodes.get(2) + "," + nodes.get(0))); + Response response = restClient.performRequest( + new Request("POST", "/_cluster/voting_config_exclusions/" + nodes.get(2) + "," + nodes.get(0)) + ); assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getEntity().getContentLength(), is(0L)); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(0))); diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java index 9363bc9ad96..f62dbbc0068 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java @@ -115,8 +115,10 @@ public class Netty4TransportMultiPortIntegrationIT extends OpenSearchNetty4Integ // bound addresses for (TransportAddress transportAddress : boundTransportAddress.boundAddresses()) { assertThat(transportAddress, instanceOf(TransportAddress.class)); - assertThat(transportAddress.address().getPort(), - is(allOf(greaterThanOrEqualTo(randomPort), lessThanOrEqualTo(randomPort + 10)))); + assertThat( + transportAddress.address().getPort(), + is(allOf(greaterThanOrEqualTo(randomPort), lessThanOrEqualTo(randomPort + 10))) + ); } // publish address diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java index 6306959ac68..90006ef78bb 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/OpenSearchLoggingHandlerIT.java @@ -68,36 +68,41 @@ public class OpenSearchLoggingHandlerIT extends OpenSearchNetty4IntegTestCase { super.tearDown(); } - @TestLogging( - value = "org.opensearch.transport.netty4.OpenSearchLoggingHandler:trace,org.opensearch.transport.TransportLogger:trace", - reason = "to ensure we log network events on TRACE level") + @TestLogging(value = "org.opensearch.transport.netty4.OpenSearchLoggingHandler:trace,org.opensearch.transport.TransportLogger:trace", reason = "to ensure we log network events on TRACE level") public void testLoggingHandler() { - final String writePattern = - ".*\\[length: \\d+" + - ", request id: \\d+" + - ", type: request" + - ", version: .*" + - ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + - " 
WRITE: \\d+B"; - final MockLogAppender.LoggingExpectation writeExpectation = - new MockLogAppender.PatternSeenEventExpectation( - "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); + final String writePattern = ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " WRITE: \\d+B"; + final MockLogAppender.LoggingExpectation writeExpectation = new MockLogAppender.PatternSeenEventExpectation( + "hot threads request", + TransportLogger.class.getCanonicalName(), + Level.TRACE, + writePattern + ); - final MockLogAppender.LoggingExpectation flushExpectation = - new MockLogAppender.SeenEventExpectation( - "flush", OpenSearchLoggingHandler.class.getCanonicalName(), Level.TRACE, "*FLUSH*"); + final MockLogAppender.LoggingExpectation flushExpectation = new MockLogAppender.SeenEventExpectation( + "flush", + OpenSearchLoggingHandler.class.getCanonicalName(), + Level.TRACE, + "*FLUSH*" + ); - final String readPattern = - ".*\\[length: \\d+" + - ", request id: \\d+" + - ", type: request" + - ", version: .*" + - ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + - " READ: \\d+B"; + final String readPattern = ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " READ: \\d+B"; - final MockLogAppender.LoggingExpectation readExpectation = - new MockLogAppender.PatternSeenEventExpectation( - "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); + final MockLogAppender.LoggingExpectation readExpectation = new MockLogAppender.PatternSeenEventExpectation( + "hot threads request", + TransportLogger.class.getCanonicalName(), + Level.TRACE, + readPattern + ); appender.addExpectation(writeExpectation); appender.addExpectation(flushExpectation); @@ -108,12 +113,22 @@ public class OpenSearchLoggingHandlerIT extends OpenSearchNetty4IntegTestCase { @TestLogging(value = "org.opensearch.transport.TcpTransport:DEBUG", reason = "to ensure we log connection events on DEBUG level") public void testConnectionLogging() throws IOException { - appender.addExpectation(new MockLogAppender.PatternSeenEventExpectation("open connection log", - TcpTransport.class.getCanonicalName(), Level.DEBUG, - ".*opened transport connection \\[[1-9][0-9]*\\] to .*")); - appender.addExpectation(new MockLogAppender.PatternSeenEventExpectation("close connection log", - TcpTransport.class.getCanonicalName(), Level.DEBUG, - ".*closed transport connection \\[[1-9][0-9]*\\] to .* with age \\[[0-9]+ms\\].*")); + appender.addExpectation( + new MockLogAppender.PatternSeenEventExpectation( + "open connection log", + TcpTransport.class.getCanonicalName(), + Level.DEBUG, + ".*opened transport connection \\[[1-9][0-9]*\\] to .*" + ) + ); + appender.addExpectation( + new MockLogAppender.PatternSeenEventExpectation( + "close connection log", + TcpTransport.class.getCanonicalName(), + Level.DEBUG, + ".*closed transport connection \\[[1-9][0-9]*\\] to .* with age \\[[0-9]+ms\\].*" + ) + ); final String nodeName = internalCluster().startNode(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeName)); diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java index 64eb28b0ba9..e9de417f1ef 100644 --- 
a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java +++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java @@ -63,13 +63,14 @@ public class Netty4BadRequestIT extends OpenSearchRestTestCase { final Setting httpMaxInitialLineLength = HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH; final String key = httpMaxInitialLineLength.getKey().substring("http.".length()); for (Map.Entry entry : map.entrySet()) { - @SuppressWarnings("unchecked") final Map settings = - (Map)((Map)entry.getValue()).get("settings"); - final int maxIntialLineLength; + @SuppressWarnings("unchecked") + final Map settings = (Map) ((Map) entry.getValue()).get("settings"); + final int maxIntialLineLength; if (settings.containsKey("http")) { - @SuppressWarnings("unchecked") final Map httpSettings = (Map)settings.get("http"); + @SuppressWarnings("unchecked") + final Map httpSettings = (Map) settings.get("http"); if (httpSettings.containsKey(key)) { - maxIntialLineLength = ByteSizeValue.parseBytesSizeValue((String)httpSettings.get(key), key).bytesAsInt(); + maxIntialLineLength = ByteSizeValue.parseBytesSizeValue((String) httpSettings.get(key), key).bytesAsInt(); } else { maxIntialLineLength = httpMaxInitialLineLength.getDefault(Settings.EMPTY).bytesAsInt(); } @@ -80,10 +81,10 @@ public class Netty4BadRequestIT extends OpenSearchRestTestCase { } final String path = "/" + new String(new byte[maxMaxInitialLineLength], Charset.forName("UTF-8")).replace('\0', 'a'); - final ResponseException e = - expectThrows( - ResponseException.class, - () -> client().performRequest(new Request(randomFrom("GET", "POST", "PUT"), path))); + final ResponseException e = expectThrows( + ResponseException.class, + () -> client().performRequest(new Request(randomFrom("GET", "POST", "PUT"), path)) + ); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(BAD_REQUEST.getStatus())); assertThat(e, hasToString(containsString("too_long_frame_exception"))); assertThat(e, hasToString(matches("An HTTP line is larger than \\d+ bytes"))); diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java index 2eeeccbd7c3..a8fc705363b 100644 --- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -88,18 +88,38 @@ public class Netty4HeadBodyIsEmptyIT extends OpenSearchRestTestCase { public void testTypeExists() throws IOException { createTestDoc(); - headTestCase("/test/_mapping/_doc", emptyMap(), OK.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - headTestCase("/test/_mapping/_doc", singletonMap("pretty", "true"), OK.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); + headTestCase( + "/test/_mapping/_doc", + emptyMap(), + OK.getStatus(), + greaterThan(0), + "Type exists requests are deprecated, as types have been deprecated." + ); + headTestCase( + "/test/_mapping/_doc", + singletonMap("pretty", "true"), + OK.getStatus(), + greaterThan(0), + "Type exists requests are deprecated, as types have been deprecated." 
+ ); } public void testTypeDoesNotExist() throws IOException { createTestDoc(); - headTestCase("/test/_mapping/does-not-exist", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - headTestCase("/text/_mapping/test,does-not-exist", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); + headTestCase( + "/test/_mapping/does-not-exist", + emptyMap(), + NOT_FOUND.getStatus(), + greaterThan(0), + "Type exists requests are deprecated, as types have been deprecated." + ); + headTestCase( + "/text/_mapping/test,does-not-exist", + emptyMap(), + NOT_FOUND.getStatus(), + greaterThan(0), + "Type exists requests are deprecated, as types have been deprecated." + ); } public void testAliasExists() throws IOException { @@ -201,11 +221,12 @@ public class Netty4HeadBodyIsEmptyIT extends OpenSearchRestTestCase { } private void headTestCase( - final String url, - final Map params, - final int expectedStatusCode, - final Matcher matcher, - final String... expectedWarnings) throws IOException { + final String url, + final Map params, + final int expectedStatusCode, + final Matcher matcher, + final String... expectedWarnings + ) throws IOException { Request request = new Request("HEAD", url); for (Map.Entry param : params.entrySet()) { request.addParameter(param.getKey(), param.getValue()); diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java index 8080896009d..66d60032d11 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java @@ -87,9 +87,6 @@ public class Netty4HttpChannel implements HttpChannel { @Override public String toString() { - return "Netty4HttpChannel{" + - "localAddress=" + getLocalAddress() + - ", remoteAddress=" + getRemoteAddress() + - '}'; + return "Netty4HttpChannel{" + "localAddress=" + getLocalAddress() + ", remoteAddress=" + getRemoteAddress() + '}'; } } diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java index 0daee04a710..8ce3af0bb14 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpRequest.java @@ -68,22 +68,44 @@ public class Netty4HttpRequest implements HttpRequest { private final boolean pooled; Netty4HttpRequest(FullHttpRequest request) { - this(request, new HttpHeadersMap(request.headers()), new AtomicBoolean(false), true, - Netty4Utils.toBytesReference(request.content())); + this( + request, + new HttpHeadersMap(request.headers()), + new AtomicBoolean(false), + true, + Netty4Utils.toBytesReference(request.content()) + ); } Netty4HttpRequest(FullHttpRequest request, Exception inboundException) { - this(request, new HttpHeadersMap(request.headers()), new AtomicBoolean(false), true, - Netty4Utils.toBytesReference(request.content()), inboundException); + this( + request, + new HttpHeadersMap(request.headers()), + new AtomicBoolean(false), + true, + Netty4Utils.toBytesReference(request.content()), + inboundException + ); } - private Netty4HttpRequest(FullHttpRequest request, HttpHeadersMap headers, 
AtomicBoolean released, boolean pooled, - BytesReference content) { + private Netty4HttpRequest( + FullHttpRequest request, + HttpHeadersMap headers, + AtomicBoolean released, + boolean pooled, + BytesReference content + ) { this(request, headers, released, pooled, content, null); } - private Netty4HttpRequest(FullHttpRequest request, HttpHeadersMap headers, AtomicBoolean released, boolean pooled, - BytesReference content, Exception inboundException) { + private Netty4HttpRequest( + FullHttpRequest request, + HttpHeadersMap headers, + AtomicBoolean released, + boolean pooled, + BytesReference content, + Exception inboundException + ) { this.request = request; this.headers = headers; this.content = content; @@ -95,17 +117,13 @@ public class Netty4HttpRequest implements HttpRequest { @Override public RestRequest.Method method() { HttpMethod httpMethod = request.method(); - if (httpMethod == HttpMethod.GET) - return RestRequest.Method.GET; + if (httpMethod == HttpMethod.GET) return RestRequest.Method.GET; - if (httpMethod == HttpMethod.POST) - return RestRequest.Method.POST; + if (httpMethod == HttpMethod.POST) return RestRequest.Method.POST; - if (httpMethod == HttpMethod.PUT) - return RestRequest.Method.PUT; + if (httpMethod == HttpMethod.PUT) return RestRequest.Method.PUT; - if (httpMethod == HttpMethod.DELETE) - return RestRequest.Method.DELETE; + if (httpMethod == HttpMethod.DELETE) return RestRequest.Method.DELETE; if (httpMethod == HttpMethod.HEAD) { return RestRequest.Method.HEAD; @@ -157,9 +175,19 @@ public class Netty4HttpRequest implements HttpRequest { try { final ByteBuf copiedContent = Unpooled.copiedBuffer(request.content()); return new Netty4HttpRequest( - new DefaultFullHttpRequest(request.protocolVersion(), request.method(), request.uri(), copiedContent, request.headers(), - request.trailingHeaders()), - headers, new AtomicBoolean(false), false, Netty4Utils.toBytesReference(copiedContent)); + new DefaultFullHttpRequest( + request.protocolVersion(), + request.method(), + request.uri(), + copiedContent, + request.headers(), + request.trailingHeaders() + ), + headers, + new AtomicBoolean(false), + false, + Netty4Utils.toBytesReference(copiedContent) + ); } finally { release(); } @@ -201,10 +229,15 @@ public class Netty4HttpRequest implements HttpRequest { HttpHeaders trailingHeaders = new DefaultHttpHeaders(); trailingHeaders.add(request.trailingHeaders()); trailingHeaders.remove(header); - FullHttpRequest requestWithoutHeader = new DefaultFullHttpRequest(request.protocolVersion(), request.method(), request.uri(), - request.content(), headersWithoutContentTypeHeader, trailingHeaders); - return new Netty4HttpRequest(requestWithoutHeader, new HttpHeadersMap(requestWithoutHeader.headers()), released, - pooled, content); + FullHttpRequest requestWithoutHeader = new DefaultFullHttpRequest( + request.protocolVersion(), + request.method(), + request.uri(), + request.content(), + headersWithoutContentTypeHeader, + trailingHeaders + ); + return new Netty4HttpRequest(requestWithoutHeader, new HttpHeadersMap(requestWithoutHeader.headers()), released, pooled, content); } @Override @@ -294,7 +327,9 @@ public class Netty4HttpRequest implements HttpRequest { @Override public Set>> entrySet() { - return httpHeaders.names().stream().map(k -> new AbstractMap.SimpleImmutableEntry<>(k, httpHeaders.getAll(k))) + return httpHeaders.names() + .stream() + .map(k -> new AbstractMap.SimpleImmutableEntry<>(k, httpHeaders.getAll(k))) .collect(Collectors.toSet()); } } diff --git 
a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpResponse.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpResponse.java index d5ff2e33a73..78df964ce9c 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpResponse.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpResponse.java @@ -64,4 +64,3 @@ public class Netty4HttpResponse extends DefaultFullHttpResponse implements HttpR return requestHeaders; } } - diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java index 8a9ffd2b88f..b077723bd65 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java @@ -111,8 +111,9 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private static final String SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = "http.netty.max_composite_buffer_components"; - public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = - new Setting<>(SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, (s) -> { + public static Setting SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = new Setting<>( + SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, + (s) -> { ByteSizeValue maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(s); /* * Netty accumulates buffers containing data from all incoming network packets that make up one HTTP request in an instance of @@ -136,12 +137,18 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { long maxBufferComponents = Math.max(2, Math.min(maxBufferComponentsEstimate, Integer.MAX_VALUE)); return String.valueOf(maxBufferComponents); // Netty's CompositeByteBuf implementation does not allow less than two components. 
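The comment in the Netty4HttpServerTransport hunk above derives a default for http.netty.max_composite_buffer_components from the configured max content length. A minimal sketch of overriding that derived default with an explicit value; the key string is the one defined in that hunk, and the value 1024 is an arbitrary illustration, not a recommendation:

import org.opensearch.common.settings.Settings;

// Sketch only: pinning the composite-buffer component cap instead of relying on the derived default.
public class CompositeBufferComponentsSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            // Key string as defined in the hunk above; the parser there enforces a floor of 2 components.
            .put("http.netty.max_composite_buffer_components", 1024)
            .build();
        System.out.println(settings.get("http.netty.max_composite_buffer_components"));
    }
}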
- }, s -> Setting.parseInt(s, 2, Integer.MAX_VALUE, SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS), Property.NodeScope); + }, + s -> Setting.parseInt(s, 2, Integer.MAX_VALUE, SETTING_KEY_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS), + Property.NodeScope + ); public static final Setting SETTING_HTTP_WORKER_COUNT = Setting.intSetting("http.netty.worker_count", 0, Property.NodeScope); - public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = - Setting.byteSizeSetting("http.netty.receive_predictor_size", new ByteSizeValue(64, ByteSizeUnit.KB), Property.NodeScope); + public static final Setting SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( + "http.netty.receive_predictor_size", + new ByteSizeValue(64, ByteSizeUnit.KB), + Property.NodeScope + ); private final ByteSizeValue maxInitialLineLength; private final ByteSizeValue maxHeaderSize; @@ -158,9 +165,16 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private volatile ServerBootstrap serverBootstrap; private volatile SharedGroupFactory.SharedGroup sharedGroup; - public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, - NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, ClusterSettings clusterSettings, - SharedGroupFactory sharedGroupFactory) { + public Netty4HttpServerTransport( + Settings settings, + NetworkService networkService, + BigArrays bigArrays, + ThreadPool threadPool, + NamedXContentRegistry xContentRegistry, + Dispatcher dispatcher, + ClusterSettings clusterSettings, + SharedGroupFactory sharedGroupFactory + ) { super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings); Netty4Utils.setAvailableProcessors(OpenSearchExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); @@ -178,10 +192,17 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings); recvByteBufAllocator = new FixedRecvByteBufAllocator(receivePredictor.bytesAsInt()); - logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " + - "receive_predictor[{}], max_composite_buffer_components[{}], pipelining_max_events[{}]", - maxChunkSize, maxHeaderSize, maxInitialLineLength, maxContentLength, receivePredictor, maxCompositeBufferComponents, - pipeliningMaxEvents); + logger.debug( + "using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " + + "receive_predictor[{}], max_composite_buffer_components[{}], pipelining_max_events[{}]", + maxChunkSize, + maxHeaderSize, + maxInitialLineLength, + maxContentLength, + receivePredictor, + maxCompositeBufferComponents, + pipeliningMaxEvents + ); } public Settings settings() { @@ -222,8 +243,10 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { if (SETTING_HTTP_TCP_KEEP_INTERVAL.get(settings) >= 0) { final SocketOption keepIntervalOption = NetUtils.getTcpKeepIntervalSocketOptionOrNull(); if (keepIntervalOption != null) { - serverBootstrap.childOption(NioChannelOption.of(keepIntervalOption), - SETTING_HTTP_TCP_KEEP_INTERVAL.get(settings)); + serverBootstrap.childOption( + NioChannelOption.of(keepIntervalOption), + SETTING_HTTP_TCP_KEEP_INTERVAL.get(settings) + ); } } if (SETTING_HTTP_TCP_KEEP_COUNT.get(settings) >= 0) { @@ -306,8 +329,8 @@ public class 
Netty4HttpServerTransport extends AbstractHttpServerTransport { protected HttpChannelHandler(final Netty4HttpServerTransport transport, final HttpHandlingSettings handlingSettings) { this.transport = transport; this.handlingSettings = handlingSettings; - this.byteBufSizer = new NettyByteBufSizer(); - this.requestCreator = new Netty4HttpRequestCreator(); + this.byteBufSizer = new NettyByteBufSizer(); + this.requestCreator = new Netty4HttpRequestCreator(); this.requestHandler = new Netty4HttpRequestHandler(transport); this.responseCreator = new Netty4HttpResponseCreator(); } @@ -321,7 +344,8 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { final HttpRequestDecoder decoder = new HttpRequestDecoder( handlingSettings.getMaxInitialLineLength(), handlingSettings.getMaxHeaderSize(), - handlingSettings.getMaxChunkSize()); + handlingSettings.getMaxChunkSize() + ); decoder.setCumulator(ByteToMessageDecoder.COMPOSITE_CUMULATOR); ch.pipeline().addLast("decoder", decoder); ch.pipeline().addLast("decoder_compress", new HttpContentDecompressor()); diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java index 8e1609496e7..9a5459a5ab5 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java @@ -60,7 +60,6 @@ import java.nio.channels.SocketChannel; import static io.netty.channel.internal.ChannelUtils.MAX_BYTES_PER_GATHERING_WRITE_ATTEMPTED_LOW_THRESHOLD; - /** * This class is adapted from {@link NioSocketChannel} class in the Netty project. It overrides the channel * read/write behavior to ensure that the bytes are always copied to a thread-local direct bytes buffer. This @@ -74,8 +73,10 @@ import static io.netty.channel.internal.ChannelUtils.MAX_BYTES_PER_GATHERING_WRI @SuppressForbidden(reason = "Channel#write") public class CopyBytesSocketChannel extends Netty4NioSocketChannel { - private static final int MAX_BYTES_PER_WRITE = StrictMath.toIntExact(ByteSizeValue.parseBytesSizeValue( - System.getProperty("opensearch.transport.buffer.size", "1m"), "opensearch.transport.buffer.size").getBytes()); + private static final int MAX_BYTES_PER_WRITE = StrictMath.toIntExact( + ByteSizeValue.parseBytesSizeValue(System.getProperty("opensearch.transport.buffer.size", "1m"), "opensearch.transport.buffer.size") + .getBytes() + ); private static final ThreadLocal ioBuffer = ThreadLocal.withInitial(() -> ByteBuffer.allocateDirect(MAX_BYTES_PER_WRITE)); private final WriteConfig writeConfig = new WriteConfig(); diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4Plugin.java index 245e88f4cfa..73cfe4e46fb 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4Plugin.java @@ -81,32 +81,62 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin { @Override public Settings additionalSettings() { return Settings.builder() - // here we set the netty4 transport and http transport as the default. This is a set once setting - // ie. if another plugin does that as well the server will fail - only one default network can exist! 
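The additionalSettings() hunk below registers netty4 as the default transport and HTTP transport (a set-once default, as the comment notes). A minimal sketch of selecting the netty4 implementations explicitly instead, mirroring the test base class earlier in this diff; NetworkModule.HTTP_TYPE_KEY is assumed here and does not appear in this diff:

import org.opensearch.common.network.NetworkModule;
import org.opensearch.common.settings.Settings;
import org.opensearch.transport.Netty4Plugin;

// Sketch only: selecting the netty4 implementations explicitly instead of relying on the
// defaults registered by additionalSettings().
public class ExplicitNettyTypesSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) // as in the test base class earlier in this diff
            .put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME) // HTTP_TYPE_KEY assumed, not shown in this diff
            .build();
        System.out.println(settings.get(NetworkModule.TRANSPORT_TYPE_KEY));
    }
}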
- .put(NetworkModule.HTTP_DEFAULT_TYPE_SETTING.getKey(), NETTY_HTTP_TRANSPORT_NAME) - .put(NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.getKey(), NETTY_TRANSPORT_NAME) - .build(); + // here we set the netty4 transport and http transport as the default. This is a set once setting + // ie. if another plugin does that as well the server will fail - only one default network can exist! + .put(NetworkModule.HTTP_DEFAULT_TYPE_SETTING.getKey(), NETTY_HTTP_TRANSPORT_NAME) + .put(NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.getKey(), NETTY_TRANSPORT_NAME) + .build(); } @Override - public Map> getTransports(Settings settings, ThreadPool threadPool, PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService) { - return Collections.singletonMap(NETTY_TRANSPORT_NAME, () -> new Netty4Transport(settings, Version.CURRENT, threadPool, - networkService, pageCacheRecycler, namedWriteableRegistry, circuitBreakerService, getSharedGroupFactory(settings))); + public Map> getTransports( + Settings settings, + ThreadPool threadPool, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedWriteableRegistry namedWriteableRegistry, + NetworkService networkService + ) { + return Collections.singletonMap( + NETTY_TRANSPORT_NAME, + () -> new Netty4Transport( + settings, + Version.CURRENT, + threadPool, + networkService, + pageCacheRecycler, + namedWriteableRegistry, + circuitBreakerService, + getSharedGroupFactory(settings) + ) + ); } @Override - public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings) { - return Collections.singletonMap(NETTY_HTTP_TRANSPORT_NAME, - () -> new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, - clusterSettings, getSharedGroupFactory(settings))); + public Map> getHttpTransports( + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher dispatcher, + ClusterSettings clusterSettings + ) { + return Collections.singletonMap( + NETTY_HTTP_TRANSPORT_NAME, + () -> new Netty4HttpServerTransport( + settings, + networkService, + bigArrays, + threadPool, + xContentRegistry, + dispatcher, + clusterSettings, + getSharedGroupFactory(settings) + ) + ); } private SharedGroupFactory getSharedGroupFactory(Settings settings) { diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java index 3bf0dea72fa..e25853d8648 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java @@ -65,7 +65,8 @@ public class NettyAllocator { if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) { ALLOCATOR = ByteBufAllocator.DEFAULT; SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; - DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) + 
DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) + ", factors={opensearch.unsafe.use_netty_default_allocator=true}]"; } else { final long heapSizeInBytes = JvmInfo.jvmInfo().getMem().getHeapMax().getBytes(); @@ -85,11 +86,17 @@ public class NettyAllocator { } else { SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; } - DESCRIPTION = "[name=unpooled, suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) - + ", factors={opensearch.unsafe.use_unpooled_allocator=" + System.getProperty(USE_UNPOOLED) - + ", g1gc_enabled=" + g1gcEnabled - + ", g1gc_region_size=" + g1gcRegionSize - + ", heap_size=" + heapSize + "}]"; + DESCRIPTION = "[name=unpooled, suggested_max_allocation_size=" + + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) + + ", factors={opensearch.unsafe.use_unpooled_allocator=" + + System.getProperty(USE_UNPOOLED) + + ", g1gc_enabled=" + + g1gcEnabled + + ", g1gc_region_size=" + + g1gcRegionSize + + ", heap_size=" + + heapSize + + "}]"; } else { int nHeapArena = PooledByteBufAllocator.defaultNumHeapArena(); int pageSize; @@ -114,16 +121,31 @@ public class NettyAllocator { int smallCacheSize = PooledByteBufAllocator.defaultSmallCacheSize(); int normalCacheSize = PooledByteBufAllocator.defaultNormalCacheSize(); boolean useCacheForAllThreads = PooledByteBufAllocator.defaultUseCacheForAllThreads(); - delegate = new PooledByteBufAllocator(false, nHeapArena, 0, pageSize, maxOrder, tinyCacheSize, - smallCacheSize, normalCacheSize, useCacheForAllThreads); + delegate = new PooledByteBufAllocator( + false, + nHeapArena, + 0, + pageSize, + maxOrder, + tinyCacheSize, + smallCacheSize, + normalCacheSize, + useCacheForAllThreads + ); int chunkSizeInBytes = pageSize << maxOrder; ByteSizeValue chunkSize = new ByteSizeValue(chunkSizeInBytes); SUGGESTED_MAX_ALLOCATION_SIZE = chunkSizeInBytes; - DESCRIPTION = "[name=opensearch_configured, chunk_size=" + chunkSize - + ", suggested_max_allocation_size=" + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) - + ", factors={opensearch.unsafe.use_netty_default_chunk_and_page_size=" + useDefaultChunkAndPageSize() - + ", g1gc_enabled=" + g1gcEnabled - + ", g1gc_region_size=" + g1gcRegionSize + "}]"; + DESCRIPTION = "[name=opensearch_configured, chunk_size=" + + chunkSize + + ", suggested_max_allocation_size=" + + new ByteSizeValue(SUGGESTED_MAX_ALLOCATION_SIZE) + + ", factors={opensearch.unsafe.use_netty_default_chunk_and_page_size=" + + useDefaultChunkAndPageSize() + + ", g1gc_enabled=" + + g1gcEnabled + + ", g1gc_region_size=" + + g1gcRegionSize + + "}]"; } ALLOCATOR = new NoDirectBuffers(delegate); } diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/SharedGroupFactory.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/SharedGroupFactory.java index dbfdea68fa4..d70f6bc83cc 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/SharedGroupFactory.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/SharedGroupFactory.java @@ -88,8 +88,10 @@ public final class SharedGroupFactory { return getGenericGroup(); } else { if (dedicatedHttpGroup == null) { - NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(httpWorkerCount, - daemonThreadFactory(settings, HttpServerTransport.HTTP_SERVER_WORKER_THREAD_NAME_PREFIX)); + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup( + httpWorkerCount, + daemonThreadFactory(settings, 
HttpServerTransport.HTTP_SERVER_WORKER_THREAD_NAME_PREFIX) + ); dedicatedHttpGroup = new SharedGroup(new RefCountedGroup(eventLoopGroup)); } return dedicatedHttpGroup; @@ -98,8 +100,10 @@ public final class SharedGroupFactory { private SharedGroup getGenericGroup() { if (genericGroup == null) { - EventLoopGroup eventLoopGroup = new NioEventLoopGroup(workerCount, - daemonThreadFactory(settings, TcpTransport.TRANSPORT_WORKER_THREAD_NAME_PREFIX)); + EventLoopGroup eventLoopGroup = new NioEventLoopGroup( + workerCount, + daemonThreadFactory(settings, TcpTransport.TRANSPORT_WORKER_THREAD_NAME_PREFIX) + ); this.genericGroup = new RefCountedGroup(eventLoopGroup); } else { genericGroup.incRef(); diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4MessageChannelHandler.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4MessageChannelHandler.java index 48eabe2fb3d..ad2e287f7a2 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4MessageChannelHandler.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4MessageChannelHandler.java @@ -70,8 +70,15 @@ final class Netty4MessageChannelHandler extends ChannelDuplexHandler { this.transport = transport; final ThreadPool threadPool = transport.getThreadPool(); final Transport.RequestHandlers requestHandlers = transport.getRequestHandlers(); - this.pipeline = new InboundPipeline(transport.getVersion(), transport.getStatsTracker(), recycler, threadPool::relativeTimeInMillis, - transport.getInflightBreaker(), requestHandlers::getHandler, transport::inboundMessage); + this.pipeline = new InboundPipeline( + transport.getVersion(), + transport.getStatsTracker(), + recycler, + threadPool::relativeTimeInMillis, + transport.getInflightBreaker(), + requestHandlers::getHandler, + transport::inboundMessage + ); } @Override diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpChannel.java index 9a1164fd7cd..eeee0ff33e7 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpChannel.java @@ -169,9 +169,6 @@ public class Netty4TcpChannel implements TcpChannel { @Override public String toString() { - return "Netty4TcpChannel{" + - "localAddress=" + getLocalAddress() + - ", remoteAddress=" + channel.remoteAddress() + - '}'; + return "Netty4TcpChannel{" + "localAddress=" + getLocalAddress() + ", remoteAddress=" + channel.remoteAddress() + '}'; } } diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpServerChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpServerChannel.java index 0bf59aae099..6131d7fe2ac 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpServerChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4TcpServerChannel.java @@ -71,8 +71,6 @@ public class Netty4TcpServerChannel implements TcpServerChannel { @Override public String toString() { - return "Netty4TcpChannel{" + - "localAddress=" + getLocalAddress() + - '}'; + return "Netty4TcpChannel{" + "localAddress=" + getLocalAddress() + '}'; } } diff --git 
a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java index 20039943fb7..45bd5464485 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java @@ -90,20 +90,29 @@ import static org.opensearch.common.util.concurrent.ConcurrentCollections.newCon public class Netty4Transport extends TcpTransport { private static final Logger logger = LogManager.getLogger(Netty4Transport.class); - public static final Setting WORKER_COUNT = - new Setting<>("transport.netty.worker_count", - (s) -> Integer.toString(OpenSearchExecutors.allocatedProcessors(s)), - (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), Property.NodeScope); + public static final Setting WORKER_COUNT = new Setting<>( + "transport.netty.worker_count", + (s) -> Integer.toString(OpenSearchExecutors.allocatedProcessors(s)), + (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"), + Property.NodeScope + ); public static final Setting NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting( - "transport.netty.receive_predictor_size", new ByteSizeValue(64, ByteSizeUnit.KB), Property.NodeScope); - public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = - byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); - public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = - byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope); - public static final Setting NETTY_BOSS_COUNT = - intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope); - + "transport.netty.receive_predictor_size", + new ByteSizeValue(64, ByteSizeUnit.KB), + Property.NodeScope + ); + public static final Setting NETTY_RECEIVE_PREDICTOR_MIN = byteSizeSetting( + "transport.netty.receive_predictor_min", + NETTY_RECEIVE_PREDICTOR_SIZE, + Property.NodeScope + ); + public static final Setting NETTY_RECEIVE_PREDICTOR_MAX = byteSizeSetting( + "transport.netty.receive_predictor_max", + NETTY_RECEIVE_PREDICTOR_SIZE, + Property.NodeScope + ); + public static final Setting NETTY_BOSS_COUNT = intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope); private final SharedGroupFactory sharedGroupFactory; private final RecvByteBufAllocator recvByteBufAllocator; @@ -113,9 +122,16 @@ public class Netty4Transport extends TcpTransport { private volatile Bootstrap clientBootstrap; private volatile SharedGroupFactory.SharedGroup sharedGroup; - public Netty4Transport(Settings settings, Version version, ThreadPool threadPool, NetworkService networkService, - PageCacheRecycler pageCacheRecycler, NamedWriteableRegistry namedWriteableRegistry, - CircuitBreakerService circuitBreakerService, SharedGroupFactory sharedGroupFactory) { + public Netty4Transport( + Settings settings, + Version version, + ThreadPool threadPool, + NetworkService networkService, + PageCacheRecycler pageCacheRecycler, + NamedWriteableRegistry namedWriteableRegistry, + CircuitBreakerService circuitBreakerService, + SharedGroupFactory sharedGroupFactory + ) { super(settings, version, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService); Netty4Utils.setAvailableProcessors(OpenSearchExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); @@ -127,8 
+143,11 @@ public class Netty4Transport extends TcpTransport { if (receivePredictorMax.getBytes() == receivePredictorMin.getBytes()) { recvByteBufAllocator = new FixedRecvByteBufAllocator((int) receivePredictorMax.getBytes()); } else { - recvByteBufAllocator = new AdaptiveRecvByteBufAllocator((int) receivePredictorMin.getBytes(), - (int) receivePredictorMin.getBytes(), (int) receivePredictorMax.getBytes()); + recvByteBufAllocator = new AdaptiveRecvByteBufAllocator( + (int) receivePredictorMin.getBytes(), + (int) receivePredictorMin.getBytes(), + (int) receivePredictorMax.getBytes() + ); } } @@ -207,9 +226,16 @@ public class Netty4Transport extends TcpTransport { private void createServerBootstrap(ProfileSettings profileSettings, SharedGroupFactory.SharedGroup sharedGroup) { String name = profileSettings.profileName; if (logger.isDebugEnabled()) { - logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], receive_predictor[{}->{}]", - name, sharedGroupFactory.getTransportWorkerCount(), profileSettings.portOrRange, profileSettings.bindHosts, - profileSettings.publishHosts, receivePredictorMin, receivePredictorMax); + logger.debug( + "using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], receive_predictor[{}->{}]", + name, + sharedGroupFactory.getTransportWorkerCount(), + profileSettings.portOrRange, + profileSettings.bindHosts, + profileSettings.publishHosts, + receivePredictorMin, + receivePredictorMax + ); } final ServerBootstrap serverBootstrap = new ServerBootstrap(); diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Utils.java index 3e64e73d56b..e13329b8c25 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Utils.java @@ -78,10 +78,11 @@ public class Netty4Utils { * in Netty and our previous value did not take, bail. 
*/ final String message = String.format( - Locale.ROOT, - "available processors value [%d] did not match current value [%d]", - availableProcessors, - NettyRuntime.availableProcessors()); + Locale.ROOT, + "available processors value [%d] did not match current value [%d]", + availableProcessors, + NettyRuntime.availableProcessors() + ); throw new IllegalStateException(message); } } diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java index fdf2c1626d8..a0100930c7d 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java @@ -102,15 +102,26 @@ public class Netty4BadRequestTests extends OpenSearchTestCase { }; Settings settings = Settings.builder().put(HttpTransportSettings.SETTING_HTTP_PORT.getKey(), getPortRange()).build(); - try (HttpServerTransport httpServerTransport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, - xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - new SharedGroupFactory(Settings.EMPTY))) { + try ( + HttpServerTransport httpServerTransport = new Netty4HttpServerTransport( + settings, + networkService, + bigArrays, + threadPool, + xContentRegistry(), + dispatcher, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + new SharedGroupFactory(Settings.EMPTY) + ) + ) { httpServerTransport.start(); final TransportAddress transportAddress = randomFrom(httpServerTransport.boundAddress().boundAddresses()); try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { - final Collection responses = - nettyHttpClient.get(transportAddress.address(), "/_cluster/settings?pretty=%"); + final Collection responses = nettyHttpClient.get( + transportAddress.address(), + "/_cluster/settings?pretty=%" + ); try { assertThat(responses, hasSize(1)); assertThat(responses.iterator().next().status().code(), equalTo(400)); @@ -120,7 +131,9 @@ public class Netty4BadRequestTests extends OpenSearchTestCase { assertThat( responseBodies.iterator().next(), containsString( - "\"reason\":\"java.lang.IllegalArgumentException: unterminated escape sequence at end of string: %\"")); + "\"reason\":\"java.lang.IllegalArgumentException: unterminated escape sequence at end of string: %\"" + ) + ); } finally { responses.forEach(ReferenceCounted::release); } diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java index 366121cb872..57f95a022a3 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java @@ -99,8 +99,7 @@ class Netty4HttpClient implements Closeable { private final Bootstrap clientBootstrap; Netty4HttpClient() { - clientBootstrap = new Bootstrap() - .channel(NettyAllocator.getChannelType()) + clientBootstrap = new Bootstrap().channel(NettyAllocator.getChannelType()) .option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator()) .group(new NioEventLoopGroup(1)); } @@ -132,8 +131,11 @@ class Netty4HttpClient implements Closeable { return processRequestsWithBody(HttpMethod.PUT, remoteAddress, urisAndBodies); } - private 
List processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, List> urisAndBodies) throws InterruptedException { + private List processRequestsWithBody( + HttpMethod method, + SocketAddress remoteAddress, + List> urisAndBodies + ) throws InterruptedException { List requests = new ArrayList<>(urisAndBodies.size()); for (Tuple uriAndBody : urisAndBodies) { ByteBuf content = Unpooled.copiedBuffer(uriAndBody.v2(), StandardCharsets.UTF_8); @@ -146,9 +148,8 @@ class Netty4HttpClient implements Closeable { return sendRequests(remoteAddress, requests); } - private synchronized List sendRequests( - final SocketAddress remoteAddress, - final Collection requests) throws InterruptedException { + private synchronized List sendRequests(final SocketAddress remoteAddress, final Collection requests) + throws InterruptedException { final CountDownLatch latch = new CountDownLatch(requests.size()); final List content = Collections.synchronizedList(new ArrayList<>(requests.size())); diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 744678a184d..d37e28dabf1 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -100,8 +100,10 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase { public void testThatPipeliningWorksWithFastSerializedRequests() throws InterruptedException { final int numberOfRequests = randomIntBetween(2, 128); - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests), - new WorkEmulatorHandler()); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + new Netty4HttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler() + ); for (int i = 0; i < numberOfRequests; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); @@ -127,8 +129,10 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase { public void testThatPipeliningWorksWhenSlowRequestsInDifferentOrder() throws InterruptedException { final int numberOfRequests = randomIntBetween(2, 128); - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests), - new WorkEmulatorHandler()); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + new Netty4HttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler() + ); for (int i = 0; i < numberOfRequests; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); @@ -157,8 +161,10 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase { public void testThatPipeliningClosesConnectionWithTooManyEvents() throws InterruptedException { final int numberOfRequests = randomIntBetween(2, 128); - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests), - new WorkEmulatorHandler()); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + new Netty4HttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler() + ); for (int i = 0; i < 1 + numberOfRequests + 1; i++) { embeddedChannel.writeInbound(createHttpRequest("/" + Integer.toString(i))); @@ -185,8 +191,7 @@ public class 
Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase {
     public void testPipeliningRequestsAreReleased() throws InterruptedException {
         final int numberOfRequests = 10;
-        final EmbeddedChannel embeddedChannel =
-            new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests + 1));
+        final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests + 1));
 
         for (int i = 0; i < numberOfRequests; i++) {
             embeddedChannel.writeInbound(createHttpRequest("/" + i));
@@ -217,7 +222,6 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase {
         }
     }
 
-
     private void assertReadHttpMessageHasContent(EmbeddedChannel embeddedChannel, String expectedContent) {
         FullHttpResponse response = (FullHttpResponse) embeddedChannel.outboundMessages().poll();
         assertNotNull("Expected response to exist, maybe you did not wait long enough?", response);
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
index cd3ca650b29..029aed1f3cc 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
@@ -94,9 +94,7 @@ public class Netty4HttpServerPipeliningTests extends OpenSearchTestCase {
     }
 
     public void testThatHttpPipeliningWorks() throws Exception {
-        final Settings settings = Settings.builder()
-            .put("http.port", "0")
-            .build();
+        final Settings settings = Settings.builder().put("http.port", "0").build();
         try (HttpServerTransport httpServerTransport = new CustomNettyHttpServerTransport(settings)) {
             httpServerTransport.start();
             final TransportAddress transportAddress = randomFrom(httpServerTransport.boundAddress().boundAddresses());
@@ -112,7 +110,7 @@ public class Netty4HttpServerPipeliningTests extends OpenSearchTestCase {
         }
 
         try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
-            Collection responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{}));
+            Collection responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[] {}));
             try {
                 Collection responseBodies = Netty4HttpClient.returnHttpResponseBodies(responses);
                 assertThat(responseBodies, contains(requests.toArray()));
@@ -128,12 +126,16 @@ public class Netty4HttpServerPipeliningTests extends OpenSearchTestCase {
         private final ExecutorService executorService = Executors.newCachedThreadPool();
 
         CustomNettyHttpServerTransport(final Settings settings) {
-            super(settings,
+            super(
+                settings,
                 Netty4HttpServerPipeliningTests.this.networkService,
                 Netty4HttpServerPipeliningTests.this.bigArrays,
                 Netty4HttpServerPipeliningTests.this.threadPool,
-                xContentRegistry(), new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
-                new SharedGroupFactory(settings));
+                xContentRegistry(),
+                new NullDispatcher(),
+                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
+                new SharedGroupFactory(settings)
+            );
         }
 
         @Override
@@ -204,8 +206,10 @@ public class Netty4HttpServerPipeliningTests extends OpenSearchTestCase {
 
             final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8);
 
-            HttpResponse response =
-                pipelinedRequest.createResponse(RestStatus.OK, new BytesArray(uri.getBytes(StandardCharsets.UTF_8)));
+            HttpResponse response = pipelinedRequest.createResponse(
+                RestStatus.OK,
+                new BytesArray(uri.getBytes(StandardCharsets.UTF_8))
+            );
             response.addHeader("content-length", Integer.toString(buffer.readableBytes()));
 
             final boolean slow = uri.matches("/slow/\\d+");
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
index 4152c30667a..ec879e538fe 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
@@ -155,8 +155,7 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
         final int maxContentLength = randomIntBetween(1, 104857600);
         final Settings settings = createBuilderWithPort().put(key, maxContentLength + "b").build();
         final int contentLength = randomIntBetween(maxContentLength + 1, Integer.MAX_VALUE);
-        runExpectHeaderTest(
-            settings, HttpHeaderValues.CONTINUE.toString(), contentLength, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE);
+        runExpectHeaderTest(settings, HttpHeaderValues.CONTINUE.toString(), contentLength, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE);
     }
 
     /**
@@ -169,10 +168,11 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
     }
 
     private void runExpectHeaderTest(
-        final Settings settings,
-        final String expectation,
-        final int contentLength,
-        final HttpResponseStatus expectedStatus) throws InterruptedException {
+        final Settings settings,
+        final String expectation,
+        final int contentLength,
+        final HttpResponseStatus expectedStatus
+    ) throws InterruptedException {
         final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
             @Override
             public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) {
@@ -181,13 +181,25 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
 
             @Override
             public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, Throwable cause) {
-                logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]",
-                    FakeRestRequest.requestToString(channel.request())), cause);
+                logger.error(
+                    new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())),
+                    cause
+                );
                 throw new AssertionError();
             }
         };
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool,
-            xContentRegistry(), dispatcher, clusterSettings, new SharedGroupFactory(settings))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                settings,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                dispatcher,
+                clusterSettings,
+                new SharedGroupFactory(settings)
+            )
+        ) {
             transport.start();
             final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
             try (Netty4HttpClient client = new Netty4HttpClient()) {
@@ -199,13 +211,18 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
                 try {
                     assertThat(response.status(), equalTo(expectedStatus));
                     if (expectedStatus.equals(HttpResponseStatus.CONTINUE)) {
-                        final FullHttpRequest continuationRequest =
-                            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", Unpooled.EMPTY_BUFFER);
+                        final FullHttpRequest continuationRequest = new DefaultFullHttpRequest(
+                            HttpVersion.HTTP_1_1,
+                            HttpMethod.POST,
+                            "/",
+                            Unpooled.EMPTY_BUFFER
+                        );
                         final FullHttpResponse continuationResponse = client.send(remoteAddress.address(), continuationRequest);
                         try {
                             assertThat(continuationResponse.status(), is(HttpResponseStatus.OK));
                             assertThat(
-                                new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done")
+                                new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8),
+                                is("done")
                             );
                         } finally {
                             continuationResponse.release();
@@ -220,21 +237,38 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
 
     public void testBindUnavailableAddress() {
         Settings initialSettings = createSettings();
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(initialSettings, networkService, bigArrays, threadPool,
-            xContentRegistry(), new NullDispatcher(), clusterSettings, new SharedGroupFactory(Settings.EMPTY))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                initialSettings,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                new NullDispatcher(),
+                clusterSettings,
+                new SharedGroupFactory(Settings.EMPTY)
+            )
+        ) {
             transport.start();
             TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
             Settings settings = Settings.builder()
                 .put("http.port", remoteAddress.getPort())
                 .put("network.host", remoteAddress.getAddress())
                 .build();
-            try (Netty4HttpServerTransport otherTransport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool,
-                xContentRegistry(), new NullDispatcher(), clusterSettings, new SharedGroupFactory(settings))) {
+            try (
+                Netty4HttpServerTransport otherTransport = new Netty4HttpServerTransport(
+                    settings,
+                    networkService,
+                    bigArrays,
+                    threadPool,
+                    xContentRegistry(),
+                    new NullDispatcher(),
+                    clusterSettings,
+                    new SharedGroupFactory(settings)
+                )
+            ) {
                 BindHttpException bindHttpException = expectThrows(BindHttpException.class, otherTransport::start);
-                assertEquals(
-                    "Failed to bind to " + NetworkAddress.format(remoteAddress.address()),
-                    bindHttpException.getMessage()
-                );
+                assertEquals("Failed to bind to " + NetworkAddress.format(remoteAddress.address()), bindHttpException.getMessage());
             }
         }
     }
@@ -273,9 +307,18 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
             settings = createBuilderWithPort().put(httpMaxInitialLineLengthSetting.getKey(), maxInitialLineLength + "b").build();
         }
 
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
-            settings, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher, clusterSettings,
-            new SharedGroupFactory(settings))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                settings,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                dispatcher,
+                clusterSettings,
+                new SharedGroupFactory(settings)
+            )
+        ) {
            transport.start();
 
            final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
@@ -288,7 +331,8 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
                 assertThat(response.status(), equalTo(HttpResponseStatus.BAD_REQUEST));
                 assertThat(
                     new String(response.content().array(), Charset.forName("UTF-8")),
-                    containsString("you sent a bad request and you should feel bad"));
+                    containsString("you sent a bad request and you should feel bad")
+                );
             } finally {
                 response.release();
             }
@@ -316,16 +360,27 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
 
             @Override
             public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
-                logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]",
-                    FakeRestRequest.requestToString(channel.request())), cause);
+                logger.error(
+                    new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())),
+                    cause
+                );
                 throw new AssertionError();
             }
 
         };
 
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
-            Settings.EMPTY, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher, clusterSettings,
-            new SharedGroupFactory(Settings.EMPTY))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                Settings.EMPTY,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                dispatcher,
+                clusterSettings,
+                new SharedGroupFactory(Settings.EMPTY)
+            )
+        ) {
             transport.start();
 
             final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
@@ -369,23 +424,32 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
             }
 
             @Override
-            public void dispatchBadRequest(final RestChannel channel,
-                                           final ThreadContext threadContext,
-                                           final Throwable cause) {
-                logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]",
-                    FakeRestRequest.requestToString(channel.request())), cause);
+            public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
+                logger.error(
+                    new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())),
+                    cause
+                );
                 throw new AssertionError();
             }
 
         };
 
-        final Settings settings = createBuilderWithPort()
-            .put(SETTING_CORS_ENABLED.getKey(), true)
-            .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "test-cors.org").build();
+        final Settings settings = createBuilderWithPort().put(SETTING_CORS_ENABLED.getKey(), true)
+            .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "test-cors.org")
+            .build();
 
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool,
-            xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
-            new SharedGroupFactory(settings))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                settings,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                dispatcher,
+                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
+                new SharedGroupFactory(settings)
+            )
+        ) {
             transport.start();
 
             final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
@@ -431,40 +495,51 @@ public class Netty4HttpServerTransportTests extends OpenSearchTestCase {
             }
 
             @Override
-            public void dispatchBadRequest(final RestChannel channel,
-                                           final ThreadContext threadContext,
-                                           final Throwable cause) {
-                logger.error(new ParameterizedMessage("--> Unexpected bad request [{}]",
-                    FakeRestRequest.requestToString(channel.request())), cause);
+            public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
+                logger.error(
+                    new ParameterizedMessage("--> Unexpected bad request [{}]", FakeRestRequest.requestToString(channel.request())),
+                    cause
+                );
                 throw new AssertionError("Should not have received a dispatched request");
             }
 
         };
 
-        Settings settings = createBuilderWithPort()
-            .put(HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT.getKey(), new TimeValue(randomIntBetween(100, 300)))
-            .build();
+        Settings settings = createBuilderWithPort().put(
+            HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT.getKey(),
+            new TimeValue(randomIntBetween(100, 300))
+        ).build();
 
         NioEventLoopGroup group = new NioEventLoopGroup();
-        try (Netty4HttpServerTransport transport = new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool,
-            xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
-            new SharedGroupFactory(settings))) {
+        try (
+            Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
+                settings,
+                networkService,
+                bigArrays,
+                threadPool,
+                xContentRegistry(),
+                dispatcher,
+                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
+                new SharedGroupFactory(settings)
+            )
+        ) {
            transport.start();
            final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());

            CountDownLatch channelClosedLatch = new CountDownLatch(1);

-            Bootstrap clientBootstrap = new Bootstrap()
-                .option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator())
+            Bootstrap clientBootstrap = new Bootstrap().option(ChannelOption.ALLOCATOR, NettyAllocator.getAllocator())
                 .channel(NioSocketChannel.class)
                 .handler(new ChannelInitializer() {
-                @Override
-                protected void initChannel(SocketChannel ch) {
-                    ch.pipeline().addLast(new ChannelHandlerAdapter() {});
+                    @Override
+                    protected void initChannel(SocketChannel ch) {
+                        ch.pipeline().addLast(new ChannelHandlerAdapter() {
+                        });
 
-                }
-            }).group(group);
+                    }
+                })
+                .group(group);
             ChannelFuture connect = clientBootstrap.connect(remoteAddress.address());
             connect.channel().closeFuture().addListener(future -> channelClosedLatch.countDown());
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
index e7d4d606053..cd263a20c9a 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
@@ -78,8 +78,16 @@ public class Netty4SizeHeaderFrameDecoderTests extends OpenSearchTestCase {
         threadPool = new ThreadPool(settings);
         NetworkService networkService = new NetworkService(Collections.emptyList());
         PageCacheRecycler recycler = new MockPageCacheRecycler(Settings.EMPTY);
-        nettyTransport = new Netty4Transport(settings, Version.CURRENT, threadPool, networkService, recycler,
-            new NamedWriteableRegistry(Collections.emptyList()), new NoneCircuitBreakerService(), new SharedGroupFactory(settings));
+        nettyTransport = new Netty4Transport(
+            settings,
+            Version.CURRENT,
+            threadPool,
+            networkService,
+            recycler,
+            new NamedWriteableRegistry(Collections.emptyList()),
+            new NoneCircuitBreakerService(),
+            new SharedGroupFactory(settings)
+        );
         nettyTransport.start();
 
         TransportAddress[] boundAddresses = nettyTransport.boundAddress().boundAddresses();
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
index 3954415131a..6aafd78ab2f 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
@@ -74,8 +74,10 @@ public class Netty4UtilsTests extends OpenSearchTestCase {
         int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset);
         ByteBuf buffer = Netty4Utils.toByteBuf(ref);
         BytesReference bytesReference = Netty4Utils.toBytesReference(buffer);
-        assertArrayEquals(BytesReference.toBytes(ref.slice(sliceOffset, sliceLength)),
-            BytesReference.toBytes(bytesReference.slice(sliceOffset, sliceLength)));
+        assertArrayEquals(
+            BytesReference.toBytes(ref.slice(sliceOffset, sliceLength)),
+            BytesReference.toBytes(bytesReference.slice(sliceOffset, sliceLength))
+        );
     }
 
     public void testToChannelBuffer() throws IOException {
@@ -101,8 +103,7 @@ public class Netty4UtilsTests extends OpenSearchTestCase {
             return new BytesArray(ref.toBytesRef());
         } else if (randomBoolean()) {
             BytesRef bytesRef = ref.toBytesRef();
-            return Netty4Utils.toBytesReference(Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset,
-                bytesRef.length));
+            return Netty4Utils.toBytesReference(Unpooled.wrappedBuffer(bytesRef.bytes, bytesRef.offset, bytesRef.length));
         } else {
             return ref;
         }
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
index fd4a76a558b..78a3a353fbf 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
@@ -133,9 +133,16 @@ public class NettyTransportMultiPortTests extends OpenSearchTestCase {
 
     private TcpTransport startTransport(Settings settings, ThreadPool threadPool) {
         PageCacheRecycler recycler = new MockPageCacheRecycler(Settings.EMPTY);
-        TcpTransport transport = new Netty4Transport(settings, Version.CURRENT, threadPool, new NetworkService(Collections.emptyList()),
-            recycler, new NamedWriteableRegistry(Collections.emptyList()), new NoneCircuitBreakerService(),
-            new SharedGroupFactory(settings));
+        TcpTransport transport = new Netty4Transport(
+            settings,
+            Version.CURRENT,
+            threadPool,
+            new NetworkService(Collections.emptyList()),
+            recycler,
+            new NamedWriteableRegistry(Collections.emptyList()),
+            new NoneCircuitBreakerService(),
+            new SharedGroupFactory(settings)
+        );
         transport.start();
 
         assertThat(transport.lifecycleState(), is(Lifecycle.State.STARTED));
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
index a0c6abb94f2..b690ba9e35e 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
@@ -75,13 +75,24 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase
 
     @Override
     protected Transport build(Settings settings, final Version version, ClusterSettings clusterSettings, boolean doHandshake) {
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
-        return new Netty4Transport(settings, version, threadPool, new NetworkService(Collections.emptyList()),
-            PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService(),
-            new SharedGroupFactory(settings)) {
+        return new Netty4Transport(
+            settings,
+            version,
+            threadPool,
+            new NetworkService(Collections.emptyList()),
+            PageCacheRecycler.NON_RECYCLING_INSTANCE,
+            namedWriteableRegistry,
+            new NoneCircuitBreakerService(),
+            new SharedGroupFactory(settings)
+        ) {
             @Override
-            public void executeHandshake(DiscoveryNode node, TcpChannel channel, ConnectionProfile profile,
-                                         ActionListener listener) {
+            public void executeHandshake(
+                DiscoveryNode node,
+                TcpChannel channel,
+                ConnectionProfile profile,
+                ActionListener listener
+            ) {
                 if (doHandshake) {
                     super.executeHandshake(node, channel, profile, listener);
                 } else {
@@ -93,8 +104,15 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase
 
     public void testConnectException() throws UnknownHostException {
         try {
-            serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876),
-                emptyMap(), emptySet(),Version.CURRENT));
+            serviceA.connectToNode(
+                new DiscoveryNode(
+                    "C",
+                    new TransportAddress(InetAddress.getByName("localhost"), 9876),
+                    emptyMap(),
+                    emptySet(),
+                    Version.CURRENT
+                )
+            );
             fail("Expected ConnectTransportException");
         } catch (ConnectTransportException e) {
             assertThat(e.getMessage(), containsString("connect_exception"));
@@ -103,11 +121,14 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase
     }
 
     public void testDefaultKeepAliveSettings() throws IOException {
-        assumeTrue("setting default keepalive options not supported on this platform",
-            (IOUtils.LINUX || IOUtils.MAC_OS_X) &&
-                JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0);
-        try (MockTransportService serviceC = buildService("TS_C", Version.CURRENT, Settings.EMPTY);
-             MockTransportService serviceD = buildService("TS_D", Version.CURRENT, Settings.EMPTY)) {
+        assumeTrue(
+            "setting default keepalive options not supported on this platform",
+            (IOUtils.LINUX || IOUtils.MAC_OS_X) && JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0
+        );
+        try (
+            MockTransportService serviceC = buildService("TS_C", Version.CURRENT, Settings.EMPTY);
+            MockTransportService serviceD = buildService("TS_D", Version.CURRENT, Settings.EMPTY)
+        ) {
             serviceC.start();
             serviceC.acceptIncomingRequests();
             serviceD.start();
diff --git a/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java b/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java
index 7e80d277776..4cdd836a602 100644
--- a/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java
+++ b/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java
@@ -48,6 +48,7 @@ public class Netty4ClientYamlTestSuiteIT extends OpenSearchClientYamlSuiteTestCase
     public static void muteInFips() {
         assumeFalse("We run with DEFAULT distribution in FIPS mode and default to security4 instead of netty4", inFipsJvm());
     }
+
    public Netty4ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
         super(testCandidate);
     }
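Note on the convention applied in the hunks above: these are formatting-only changes with no behavior change. As a minimal illustrative sketch, assuming nothing beyond what the diff itself shows, the snippet below demonstrates the call-site wrapping style being introduced: when an argument list no longer fits on one line, each argument is placed on its own line and the closing parenthesis is dedented onto its own line. The class and argument names here are hypothetical and exist only for this example; they are not part of the patch.

// Illustrative only: mirrors the wrapping style applied to long constructor calls
// such as new Netty4HttpServerTransport(...) and new Netty4Transport(...) above.
public class WrappingStyleExample {

    // Hypothetical stand-in for a constructor with many parameters.
    static final class ExampleTransport {
        private final String description;

        ExampleTransport(String settings, String networkService, String threadPool, String registry, String groupFactory) {
            // Join the arguments so the object has something observable to print.
            this.description = String.join(", ", settings, networkService, threadPool, registry, groupFactory);
        }

        @Override
        public String toString() {
            return "ExampleTransport[" + description + "]";
        }
    }

    public static void main(String[] args) {
        // One argument per line; the closing ")" sits on its own line, aligned
        // with the start of the statement, as in the reformatted hunks above.
        ExampleTransport transport = new ExampleTransport(
            "settings",
            "networkService",
            "threadPool",
            "namedWriteableRegistry",
            "sharedGroupFactory"
        );
        System.out.println(transport);
    }
}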