From 3adaf096758a6015ca4f733e2e49ee5528ac3cd5 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 24 Mar 2016 11:54:05 -0700 Subject: [PATCH 0001/1311] Settings: Cleanup placeholder replacement This change moves placeholder replacement to a pkg private class for settings. It also adds a null check when calling replacement, as settings objects can still contain null values, because we only prohibit nulls on file loading. Finally, this cleans up file and stream loading a bit to not have unnecessary exception wrapping. --- .../common/logging/LogConfigurator.java | 2 +- .../PropertyPlaceholder.java | 37 ++++------ .../common/settings/Settings.java | 35 +++++---- .../indices/analysis/HunspellService.java | 2 +- .../internal/InternalSettingsPreparer.java | 6 +- .../PropertyPlaceholderTests.java | 62 ++++++---------- .../loader/YamlSettingsLoaderTests.java | 60 ++++++++-------- .../index/analysis/AnalysisModuleTests.java | 2 +- .../index/analysis/CompoundAnalysisTests.java | 4 +- .../cloud/azure/AbstractAzureTestCase.java | 72 ------------------- .../cloud/aws/AbstractAwsTestCase.java | 7 +- .../AbstractAzureWithThirdPartyTestCase.java | 7 +- .../cloud/aws/AbstractAwsTestCase.java | 7 +- 13 files changed, 110 insertions(+), 193 deletions(-) rename core/src/main/java/org/elasticsearch/common/{property => settings}/PropertyPlaceholder.java (83%) rename core/src/test/java/org/elasticsearch/common/{property => settings}/PropertyPlaceholderTests.java (78%) delete mode 100644 plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index da628b09d2b..5ccbfe4a2dd 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -167,7 +167,7 @@ public class LogConfigurator { static void loadConfig(Path file, Settings.Builder settingsBuilder) { try { settingsBuilder.loadFromPath(file); - } catch (SettingsException | NoClassDefFoundError e) { + } catch (IOException | SettingsException | NoClassDefFoundError e) { // ignore } } diff --git a/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java b/core/src/main/java/org/elasticsearch/common/settings/PropertyPlaceholder.java similarity index 83% rename from core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java rename to core/src/main/java/org/elasticsearch/common/settings/PropertyPlaceholder.java index 70e6807cb92..2eb7e2b8e70 100644 --- a/core/src/main/java/org/elasticsearch/common/property/PropertyPlaceholder.java +++ b/core/src/main/java/org/elasticsearch/common/settings/PropertyPlaceholder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.property; +package org.elasticsearch.common.settings; import org.elasticsearch.common.Strings; @@ -34,23 +34,12 @@ import java.util.Set; * Values for substitution can be supplied using a {@link Properties} instance or using a * {@link PlaceholderResolver}. */ -public class PropertyPlaceholder { +class PropertyPlaceholder { private final String placeholderPrefix; private final String placeholderSuffix; private final boolean ignoreUnresolvablePlaceholders; - /** - * Creates a new PropertyPlaceholderHelper that uses the supplied prefix and suffix. Unresolvable - * placeholders are ignored. 
- * - * @param placeholderPrefix the prefix that denotes the start of a placeholder. - * @param placeholderSuffix the suffix that denotes the end of a placeholder. - */ - public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix) { - this(placeholderPrefix, placeholderSuffix, true); - } - /** * Creates a new PropertyPlaceholderHelper that uses the supplied prefix and suffix. * @@ -59,12 +48,10 @@ public class PropertyPlaceholder { * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should be ignored * (true) or cause an exception (false). */ - public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix, + PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix, boolean ignoreUnresolvablePlaceholders) { - Objects.requireNonNull(placeholderPrefix, "Argument 'placeholderPrefix' must not be null."); - Objects.requireNonNull(placeholderSuffix, "Argument 'placeholderSuffix' must not be null."); - this.placeholderPrefix = placeholderPrefix; - this.placeholderSuffix = placeholderSuffix; + this.placeholderPrefix = Objects.requireNonNull(placeholderPrefix); + this.placeholderSuffix = Objects.requireNonNull(placeholderSuffix); this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders; } @@ -75,15 +62,15 @@ public class PropertyPlaceholder { * @param value the value containing the placeholders to be replaced. * @param placeholderResolver the PlaceholderResolver to use for replacement. * @return the supplied value with placeholders replaced inline. + * @throws NullPointerException if value is null */ - public String replacePlaceholders(String key, String value, PlaceholderResolver placeholderResolver) { - Objects.requireNonNull(key); - Objects.requireNonNull(value, "value can not be null for [" + key + "]"); - return parseStringValue(value, placeholderResolver, new HashSet()); + String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) { + Objects.requireNonNull(value); + return parseStringValue(value, placeholderResolver, new HashSet<>()); } - protected String parseStringValue(String strVal, PlaceholderResolver placeholderResolver, - Set visitedPlaceholders) { + private String parseStringValue(String strVal, PlaceholderResolver placeholderResolver, + Set visitedPlaceholders) { StringBuilder buf = new StringBuilder(strVal); int startIndex = strVal.indexOf(this.placeholderPrefix); @@ -164,7 +151,7 @@ public class PropertyPlaceholder { * * @see PropertyPlaceholder */ - public interface PlaceholderResolver { + interface PlaceholderResolver { /** * Resolves the supplied placeholder name into the replacement value. 
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index ce79bf92d20..887edf48443 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.property.PropertyPlaceholder; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.settings.loader.SettingsLoaderFactory; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -1114,26 +1113,20 @@ public final class Settings implements ToXContent { * Loads settings from a url that represents them using the * {@link SettingsLoaderFactory#loaderFromSource(String)}. */ - public Builder loadFromPath(Path path) throws SettingsException { - try { - return loadFromStream(path.getFileName().toString(), Files.newInputStream(path)); - } catch (IOException e) { - throw new SettingsException("Failed to open stream for url [" + path + "]", e); - } + public Builder loadFromPath(Path path) throws IOException { + // NOTE: loadFromStream will close the input stream + return loadFromStream(path.getFileName().toString(), Files.newInputStream(path)); } /** * Loads settings from a stream that represents them using the * {@link SettingsLoaderFactory#loaderFromSource(String)}. */ - public Builder loadFromStream(String resourceName, InputStream is) throws SettingsException { + public Builder loadFromStream(String resourceName, InputStream is) throws IOException { SettingsLoader settingsLoader = SettingsLoaderFactory.loaderFromResource(resourceName); - try { - Map loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8))); - put(loadedSettings); - } catch (Exception e) { - throw new SettingsException("Failed to load settings from [" + resourceName + "]", e); - } + // NOTE: copyToString will close the input stream + Map loadedSettings = settingsLoader.load(Streams.copyToString(new InputStreamReader(is, StandardCharsets.UTF_8))); + put(loadedSettings); return this; } @@ -1220,14 +1213,20 @@ public final class Settings implements ToXContent { return true; } }; - for (Map.Entry entry : new HashMap<>(map).entrySet()) { - String value = propertyPlaceholder.replacePlaceholders(entry.getKey(), entry.getValue(), placeholderResolver); + Iterator> entryItr = map.entrySet().iterator(); + while (entryItr.hasNext()) { + Map.Entry entry = entryItr.next(); + if (entry.getValue() == null) { + // a null value obviously can't be replaced + continue; + } + String value = propertyPlaceholder.replacePlaceholders(entry.getValue(), placeholderResolver); // if the values exists and has length, we should maintain it in the map // otherwise, the replace process resolved into removing it if (Strings.hasLength(value)) { - map.put(entry.getKey(), value); + entry.setValue(value); } else { - map.remove(entry.getKey()); + entryItr.remove(); } } return this; diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 75c15f09778..4450bd557b6 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ 
b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -210,7 +210,7 @@ public class HunspellService extends AbstractComponent { * @param defaults The default settings for this dictionary * @return The resolved settings. */ - private static Settings loadDictionarySettings(Path dir, Settings defaults) { + private static Settings loadDictionarySettings(Path dir, Settings defaults) throws IOException { Path file = dir.resolve("settings.yml"); if (Files.exists(file)) { return Settings.settingsBuilder().loadFromPath(file).put(defaults).build(); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 8864a70ccdc..f9539f7c363 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -92,7 +92,11 @@ public class InternalSettingsPreparer { Path path = environment.configFile().resolve("elasticsearch" + allowedSuffix); if (Files.exists(path)) { if (!settingsFileFound) { - output.loadFromPath(path); + try { + output.loadFromPath(path); + } catch (IOException e) { + throw new SettingsException("Failed to load settings from " + path.toString(), e); + } } settingsFileFound = true; foundSuffixes.add(allowedSuffix); diff --git a/core/src/test/java/org/elasticsearch/common/property/PropertyPlaceholderTests.java b/core/src/test/java/org/elasticsearch/common/settings/PropertyPlaceholderTests.java similarity index 78% rename from core/src/test/java/org/elasticsearch/common/property/PropertyPlaceholderTests.java rename to core/src/test/java/org/elasticsearch/common/settings/PropertyPlaceholderTests.java @@ -17,14 +17,13 @@ * under the License.
*/ -package org.elasticsearch.common.property; - -import org.elasticsearch.test.ESTestCase; +package org.elasticsearch.common.settings; import java.util.LinkedHashMap; import java.util.Map; -import static org.hamcrest.Matchers.hasToString; +import org.elasticsearch.test.ESTestCase; + import static org.hamcrest.Matchers.is; public class PropertyPlaceholderTests extends ESTestCase { @@ -34,10 +33,10 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("foo1", "bar1"); map.put("foo2", "bar2"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("bar1", propertyPlaceholder.replacePlaceholders("key", "{foo1}", placeholderResolver)); - assertEquals("a bar1b", propertyPlaceholder.replacePlaceholders("key", "a {foo1}b", placeholderResolver)); - assertEquals("bar1bar2", propertyPlaceholder.replacePlaceholders("key", "{foo1}{foo2}", placeholderResolver)); - assertEquals("a bar1 b bar2 c", propertyPlaceholder.replacePlaceholders("key", "a {foo1} b {foo2} c", placeholderResolver)); + assertEquals("bar1", propertyPlaceholder.replacePlaceholders("{foo1}", placeholderResolver)); + assertEquals("a bar1b", propertyPlaceholder.replacePlaceholders("a {foo1}b", placeholderResolver)); + assertEquals("bar1bar2", propertyPlaceholder.replacePlaceholders("{foo1}{foo2}", placeholderResolver)); + assertEquals("a bar1 b bar2 c", propertyPlaceholder.replacePlaceholders("a {foo1} b {foo2} c", placeholderResolver)); } public void testVariousPrefixSuffix() { @@ -48,24 +47,24 @@ public class PropertyPlaceholderTests extends ESTestCase { Map map = new LinkedHashMap<>(); map.put("foo", "bar"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("bar", ppEqualsPrefix.replacePlaceholders("key", "{foo}", placeholderResolver)); - assertEquals("bar", ppLongerPrefix.replacePlaceholders("key", "${foo}", placeholderResolver)); - assertEquals("bar", ppShorterPrefix.replacePlaceholders("key", "{foo}}", placeholderResolver)); + assertEquals("bar", ppEqualsPrefix.replacePlaceholders("{foo}", placeholderResolver)); + assertEquals("bar", ppLongerPrefix.replacePlaceholders("${foo}", placeholderResolver)); + assertEquals("bar", ppShorterPrefix.replacePlaceholders("{foo}}", placeholderResolver)); } public void testDefaultValue() { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); Map map = new LinkedHashMap<>(); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("bar", propertyPlaceholder.replacePlaceholders("key", "${foo:bar}", placeholderResolver)); - assertEquals("", propertyPlaceholder.replacePlaceholders("key", "${foo:}", placeholderResolver)); + assertEquals("bar", propertyPlaceholder.replacePlaceholders("${foo:bar}", placeholderResolver)); + assertEquals("", propertyPlaceholder.replacePlaceholders("${foo:}", placeholderResolver)); } public void testIgnoredUnresolvedPlaceholder() { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", true); Map map = new LinkedHashMap<>(); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("${foo}", propertyPlaceholder.replacePlaceholders("key", "${foo}", placeholderResolver)); + assertEquals("${foo}", propertyPlaceholder.replacePlaceholders("${foo}", placeholderResolver)); } public void testNotIgnoredUnresolvedPlaceholder() { @@ 
-73,7 +72,7 @@ public class PropertyPlaceholderTests extends ESTestCase { Map map = new LinkedHashMap<>(); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); try { - propertyPlaceholder.replacePlaceholders("key", "${foo}", placeholderResolver); + propertyPlaceholder.replacePlaceholders("${foo}", placeholderResolver); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("Could not resolve placeholder 'foo'")); @@ -84,7 +83,7 @@ public class PropertyPlaceholderTests extends ESTestCase { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); Map map = new LinkedHashMap<>(); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, true, true); - assertEquals("bar", propertyPlaceholder.replacePlaceholders("key", "bar${foo}", placeholderResolver)); + assertEquals("bar", propertyPlaceholder.replacePlaceholders("bar${foo}", placeholderResolver)); } public void testRecursive() { @@ -94,8 +93,8 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("foo1", "${foo2}"); map.put("foo2", "bar"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("bar", propertyPlaceholder.replacePlaceholders("key", "${foo}", placeholderResolver)); - assertEquals("abarb", propertyPlaceholder.replacePlaceholders("key", "a${foo}b", placeholderResolver)); + assertEquals("bar", propertyPlaceholder.replacePlaceholders("${foo}", placeholderResolver)); + assertEquals("abarb", propertyPlaceholder.replacePlaceholders("a${foo}b", placeholderResolver)); } public void testNestedLongerPrefix() { @@ -106,7 +105,7 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("foo2", "bar"); map.put("barbar", "baz"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("baz", propertyPlaceholder.replacePlaceholders("key", "${bar${foo}}", placeholderResolver)); + assertEquals("baz", propertyPlaceholder.replacePlaceholders("${bar${foo}}", placeholderResolver)); } public void testNestedSameLengthPrefixSuffix() { @@ -117,7 +116,7 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("foo2", "bar"); map.put("barbar", "baz"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("baz", propertyPlaceholder.replacePlaceholders("key", "{bar{foo}}", placeholderResolver)); + assertEquals("baz", propertyPlaceholder.replacePlaceholders("{bar{foo}}", placeholderResolver)); } public void testNestedShorterPrefix() { @@ -128,7 +127,7 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("foo2", "bar"); map.put("barbar", "baz"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); - assertEquals("baz", propertyPlaceholder.replacePlaceholders("key", "{bar{foo}}}}", placeholderResolver)); + assertEquals("baz", propertyPlaceholder.replacePlaceholders("{bar{foo}}}}", placeholderResolver)); } public void testCircularReference() { @@ -138,7 +137,7 @@ public class PropertyPlaceholderTests extends ESTestCase { map.put("bar", "${foo}"); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, false, true); try { - propertyPlaceholder.replacePlaceholders("key", "${foo}", placeholderResolver); + 
propertyPlaceholder.replacePlaceholders("${foo}", placeholderResolver); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("Circular placeholder reference 'foo' in property definitions")); @@ -149,24 +148,7 @@ public class PropertyPlaceholderTests extends ESTestCase { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); Map map = new LinkedHashMap<>(); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, true, false); - assertEquals("bar${foo}", propertyPlaceholder.replacePlaceholders("key", "bar${foo}", placeholderResolver)); - } - - public void testNullKey() { - final PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); - final Map map = new LinkedHashMap<>(); - final PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, true, false); - expectThrows(NullPointerException.class, () -> propertyPlaceholder.replacePlaceholders(null, "value", placeholderResolver)); - } - - public void testNullValue() { - final PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); - final Map map = new LinkedHashMap<>(); - final PropertyPlaceholder.PlaceholderResolver placeholderResolver = new SimplePlaceholderResolver(map, true, false); - final String key = randomAsciiOfLength(10); - NullPointerException e = - expectThrows(NullPointerException.class, () -> propertyPlaceholder.replacePlaceholders(key, null, placeholderResolver)); - assertThat(e, hasToString("java.lang.NullPointerException: value can not be null for [" + key + "]")); + assertEquals("bar${foo}", propertyPlaceholder.replacePlaceholders("bar${foo}", placeholderResolver)); } private class SimplePlaceholderResolver implements PropertyPlaceholder.PlaceholderResolver { diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java index 2e2a187da0b..67db756cb3d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java @@ -19,6 +19,11 @@ package org.elasticsearch.common.settings.loader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -48,42 +53,39 @@ public class YamlSettingsLoaderTests extends ESTestCase { assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2")); } - public void testIndentation() { - final String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml"; - final SettingsException e = - expectThrows( - SettingsException.class, - () -> settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)).build()); - assertThat(e.getMessage(), containsString("Failed to load settings")); + public void testIndentation() throws Exception { + String yaml = "/org/elasticsearch/common/settings/loader/indentation-settings.yml"; + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { + settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)); + }); + assertTrue(e.getMessage(), 
e.getMessage().contains("malformed")); } - public void testIndentationWithExplicitDocumentStart() { - final String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml"; - final SettingsException e = - expectThrows( - SettingsException.class, - () -> settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)).build()); - assertThat(e.getMessage(), containsString("Failed to load settings")); + public void testIndentationWithExplicitDocumentStart() throws Exception { + String yaml = "/org/elasticsearch/common/settings/loader/indentation-with-explicit-document-start-settings.yml"; + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { + settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)); + }); + assertTrue(e.getMessage(), e.getMessage().contains("malformed")); } public void testDuplicateKeysThrowsException() { - final String yaml = "foo: bar\nfoo: baz"; - final SettingsException e = expectThrows(SettingsException.class, () -> settingsBuilder().loadFromSource(yaml).build()); + String yaml = "foo: bar\nfoo: baz"; + SettingsException e = expectThrows(SettingsException.class, () -> { + settingsBuilder().loadFromSource(yaml); + }); assertEquals(e.getCause().getClass(), ElasticsearchParseException.class); - assertThat( - e.toString(), - containsString("duplicate settings key [foo] " + - "found at line number [2], " + - "column number [6], " + - "previous value [bar], " + - "current value [baz]")); + String msg = e.getCause().getMessage(); + assertTrue(msg, msg.contains("duplicate settings key [foo] found")); + assertTrue(msg, msg.contains("previous value [bar], current value [baz]")); } - public void testNullValuedSettingThrowsException() { - final String yaml = "foo:"; - final ElasticsearchParseException e = - expectThrows(ElasticsearchParseException.class, () -> new YamlSettingsLoader(false).load(yaml)); - assertThat(e.toString(), containsString("null-valued setting found for key [foo] found at line number [1], column number [5]")); + public void testMissingValue() throws Exception { + Path tmp = createTempFile("test", ".yaml"); + Files.write(tmp, Collections.singletonList("foo: # missing value\n"), StandardCharsets.UTF_8); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { + settingsBuilder().loadFromPath(tmp); + }); + assertTrue(e.getMessage(), e.getMessage().contains("null-valued setting found for key [foo]")); } - } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 6468fae9397..e1d35039f02 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -79,7 +79,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap()); } - private Settings loadFromClasspath(String path) { + private Settings loadFromClasspath(String path) throws IOException { return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java 
b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index fbedf42d083..fe5b0855798 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -91,7 +91,7 @@ public class CompoundAnalysisTests extends ESTestCase { return terms; } - private Settings getJsonSettings() { + private Settings getJsonSettings() throws IOException { String json = "/org/elasticsearch/index/analysis/test1.json"; return settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) @@ -100,7 +100,7 @@ public class CompoundAnalysisTests extends ESTestCase { .build(); } - private Settings getYamlSettings() { + private Settings getYamlSettings() throws IOException { String yaml = "/org/elasticsearch/index/analysis/test1.yml"; return settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java deleted file mode 100644 index ad7140f5020..00000000000 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cloud.azure; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsException; -import org.elasticsearch.env.Environment; -import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ThirdParty; - -import java.util.Collection; - -/** - * Base class for Azure tests that require credentials. - *

- * You must specify {@code -Dtests.thirdparty=true -Dtests.config=/path/to/config} - * in order to run these tests. - */ -@ThirdParty -public abstract class AbstractAzureTestCase extends ESIntegTestCase { - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(readSettingsFromFile()) - .build(); - } - - @Override - protected Collection> nodePlugins() { - return pluginList(AzureDiscoveryPlugin.class); - } - - protected Settings readSettingsFromFile() { - Settings.Builder settings = Settings.builder(); - settings.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); - - // if explicit, just load it and don't load from env - try { - if (Strings.hasText(System.getProperty("tests.config"))) { - settings.loadFromPath(PathUtils.get((System.getProperty("tests.config")))); - } else { - throw new IllegalStateException("to run integration tests, you need to set -Dtests.thirdparty=true and -Dtests.config=/path/to/elasticsearch.yml"); - } - } catch (SettingsException exception) { - throw new IllegalStateException("your test configuration file is incorrect: " + System.getProperty("tests.config"), exception); - } - return settings.build(); - } -} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index cc9b0897600..dc794038598 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; +import java.io.IOException; import java.util.Collection; /** @@ -52,7 +53,11 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { // if explicit, just load it and don't load from env try { if (Strings.hasText(System.getProperty("tests.config"))) { - settings.loadFromPath(PathUtils.get(System.getProperty("tests.config"))); + try { + settings.loadFromPath(PathUtils.get(System.getProperty("tests.config"))); + } catch (IOException e) { + throw new IllegalArgumentException("could not load aws tests config", e); + } } else { throw new IllegalStateException("to run integration tests, you need to set -Dtests.thirdparty=true and -Dtests.config=/path/to/elasticsearch.yml"); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureWithThirdPartyTestCase.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureWithThirdPartyTestCase.java index b7c2d373a58..7136befeaff 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureWithThirdPartyTestCase.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureWithThirdPartyTestCase.java @@ -27,6 +27,7 @@ import org.elasticsearch.plugin.repository.azure.AzureRepositoryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; +import java.io.IOException; import java.util.Collection; /** @@ -58,7 +59,11 @@ public abstract class AbstractAzureWithThirdPartyTestCase extends AbstractAzureT // if explicit, just load it and don't load from env try { if (Strings.hasText(System.getProperty("tests.config"))) { - 
settings.loadFromPath(PathUtils.get((System.getProperty("tests.config")))); + try { + settings.loadFromPath(PathUtils.get((System.getProperty("tests.config")))); + } catch (IOException e) { + throw new IllegalArgumentException("could not load azure tests config", e); + } } else { throw new IllegalStateException("to run integration tests, you need to set -Dtests.thirdparty=true and -Dtests.config=/path/to/elasticsearch.yml"); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index ec8fb902d66..9d1768db58b 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; +import java.io.IOException; import java.util.Collection; /** @@ -52,7 +53,11 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { // if explicit, just load it and don't load from env try { if (Strings.hasText(System.getProperty("tests.config"))) { - settings.loadFromPath(PathUtils.get(System.getProperty("tests.config"))); + try { + settings.loadFromPath(PathUtils.get(System.getProperty("tests.config"))); + } catch (IOException e) { + throw new IllegalArgumentException("could not load aws tests config", e); + } } else { throw new IllegalStateException("to run integration tests, you need to set -Dtests.thirdparty=true and -Dtests.config=/path/to/elasticsearch.yml"); } From 6380560dbb989242e48f68719fe6045c51f7017e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 11 Apr 2016 14:35:37 -0700 Subject: [PATCH 0002/1311] Check more complete exception message for missing setting value --- .../common/settings/loader/YamlSettingsLoaderTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java index f3926ebe07e..618209cf114 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java @@ -85,6 +85,6 @@ public class YamlSettingsLoaderTests extends ESTestCase { ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { Settings.builder().loadFromPath(tmp); }); - assertTrue(e.getMessage(), e.getMessage().contains("null-valued setting found for key [foo]")); + assertTrue(e.getMessage(), e.getMessage().contains("null-valued setting found for key [foo] found at line")); } } From f2ee759ad56a11ee42236b13d7edf687149b7d94 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Fri, 15 Apr 2016 11:51:47 +0200 Subject: [PATCH 0003/1311] Upgrade AWS SDK to 1.10.69 * Moving from JSON.org to Jackson for request marshallers. * The Java SDK now supports retry throttling to limit the rate of retries during periods of reduced availability. This throttling behavior can be enabled via ClientConfiguration or via the system property "-Dcom.amazonaws.sdk.enableThrottledRetry". * Fixed String case conversion issues when running with non English locales. 
* AWS SDK for Java introduces a new dynamic endpoint system that can compute endpoints for services in new regions.
* Introducing a new AWS region, ap-northeast-2.
* Added a new metric, HttpSocketReadTime, that records socket read latency. You can enable this metric by adding enableHttpSocketReadMetric to the system property com.amazonaws.sdk.enableDefaultMetrics. For more information, see [Enabling Metrics with the AWS SDK for Java](https://java.awsblog.com/post/Tx3C0RV4NRRBKTG/Enabling-Metrics-with-the-AWS-SDK-for-Java).
* New Client Execution timeout feature to set a limit on the total time spent across retries, backoffs, unmarshalling, etc. This new timeout can be specified at the client level or per request. Also included in this release is the ability to specify the existing HTTP Request timeout per request rather than just per client. (See the sketch after this list.)
* Added support for RequesterPays for all operations.
* Ignore the 'Connection' header when generating S3 responses.
* Allow users to generate an AmazonS3URI from a string without using URL encoding.
* Fixed an issue that prevented creating buckets when using a client configured for the s3-external-1 endpoint.
* Amazon S3 bucket lifecycle configuration supports two new features: the removal of expired object delete markers and an action to abort incomplete multipart uploads.
* Allow TransferManagerConfiguration to accept integer values for the multipart upload threshold.
* Copy the list of ETags before sorting: https://github.com/aws/aws-sdk-java/pull/589.
* Option to disable chunked encoding: https://github.com/aws/aws-sdk-java/pull/586.
* Adding retry on InternalErrors in the CompleteMultipartUpload operation: https://github.com/aws/aws-sdk-java/issues/538
* Deprecated two APIs: AmazonS3#changeObjectStorageClass and AmazonS3#setObjectRedirectLocation.
* Added support for the aws-exec-read canned ACL. Owner gets FULL_CONTROL. Amazon EC2 gets READ access to GET an Amazon Machine Image (AMI) bundle from Amazon S3.
* Added support for referencing security groups in peered Virtual Private Clouds (VPCs). For more information see the service announcement at https://aws.amazon.com/about-aws/whats-new/2016/03/announcing-support-for-security-group-references-in-a-peered-vpc/.
* Fixed a bug in the AWS SDK for Java - Amazon EC2 module that returned an NPE for dry run requests.
* Regenerated client with new implementation of code generator.
* ClassicLink DNS support enables DNS resolution of public hostnames to private IP addresses when queried over ClassicLink. Additionally, you can now access private hosted zones associated with your VPC from a linked EC2-Classic instance. This makes it easier for EC2-Classic instances to communicate with VPC resources using public DNS hostnames.
* You can now use Network Address Translation (NAT) Gateway, a highly available AWS managed service that makes it easy to connect to the Internet from instances within a private subnet in an AWS Virtual Private Cloud (VPC). Previously, you needed to launch a NAT instance to enable NAT for instances in a private subnet. Amazon VPC NAT Gateway is available in the US East (N. Virginia), US West (Oregon), US West (N. California), EU (Ireland), Asia Pacific (Tokyo), Asia Pacific (Singapore), and Asia Pacific (Sydney) regions. To learn more about Amazon VPC NAT, see [New - Managed NAT (Network Address Translation) Gateway for AWS](https://aws.amazon.com/blogs/aws/new-managed-nat-network-address-translation-gateway-for-aws/).
* A default read timeout is now applied when querying data from the EC2 metadata service.
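As a concrete illustration of the timeout and retry-throttling items above, here is a minimal, hypothetical sketch of configuring an S3 client against the upgraded SDK. Only the com.amazonaws.sdk.enableThrottledRetry system property name is taken verbatim from these notes; the ClientConfiguration setter names and the timeout values are assumptions and should be checked against the 1.10.69 Javadoc.

import com.amazonaws.ClientConfiguration;
import com.amazonaws.services.s3.AmazonS3Client;

public class SdkTimeoutSketch {
    public static void main(String[] args) {
        // JVM-wide opt-in to retry throttling; the property name comes from the notes above.
        System.setProperty("com.amazonaws.sdk.enableThrottledRetry", "true");

        ClientConfiguration config = new ClientConfiguration();
        // Assumed setter: caps the total time spent across retries, backoffs,
        // unmarshalling, etc. for a single logical call, in milliseconds.
        config.setClientExecutionTimeout(30_000);
        // Assumed setter: the existing per-request HTTP timeout, in milliseconds.
        config.setRequestTimeout(10_000);

        AmazonS3Client client = new AmazonS3Client(config);
    }
}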
--- plugins/discovery-ec2/build.gradle | 7 +- .../aws-java-sdk-core-1.10.33.jar.sha1 | 1 - .../aws-java-sdk-core-1.10.69.jar.sha1 | 1 + .../aws-java-sdk-ec2-1.10.33.jar.sha1 | 1 - .../aws-java-sdk-ec2-1.10.69.jar.sha1 | 1 + .../discovery/ec2/AmazonEC2Mock.java | 128 ++++++++++++++++++ plugins/repository-s3/build.gradle | 10 +- .../aws-java-sdk-core-1.10.33.jar.sha1 | 1 - .../aws-java-sdk-core-1.10.69.jar.sha1 | 1 + .../aws-java-sdk-kms-1.10.33.jar.sha1 | 1 - .../aws-java-sdk-kms-1.10.69.jar.sha1 | 1 + .../licenses/aws-java-sdk-s3-1.10.33.jar.sha1 | 1 - .../licenses/aws-java-sdk-s3-1.10.69.jar.sha1 | 1 + .../cloud/aws/AmazonS3Wrapper.java | 18 +++ 14 files changed, 156 insertions(+), 17 deletions(-) delete mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.33.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.69.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.33.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.69.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-core-1.10.33.jar.sha1 create mode 100644 plugins/repository-s3/licenses/aws-java-sdk-core-1.10.69.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.33.jar.sha1 create mode 100644 plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.69.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.33.jar.sha1 create mode 100644 plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.69.jar.sha1 diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index e8baa1ec502..15cce1aa32b 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'aws': '1.10.33' + 'aws': '1.10.69' ] dependencies { @@ -48,11 +48,6 @@ test { } thirdPartyAudit.excludes = [ - // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext - 'com.amazonaws.util.XpathUtils', - // classes are missing 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.33.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.33.jar.sha1 deleted file mode 100644 index 332a8f01035..00000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.33.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fabedbbe2b834b1add150b6a38395c5ef7380168 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.69.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.69.jar.sha1 new file mode 100644 index 00000000000..2971a33d7d9 --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.10.69.jar.sha1 @@ -0,0 +1 @@ +a1f02d5f26ba1d8c37e2bf9c847db3c6729dda00 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.33.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.33.jar.sha1 deleted file mode 100644 index 4737b80b3f2..00000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.33.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -202f6b5dbc196e355d50c131b0fd34969bfd89e6 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.69.jar.sha1 
b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.69.jar.sha1 new file mode 100644 index 00000000000..a1a493d3b8f --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.10.69.jar.sha1 @@ -0,0 +1 @@ +afbff1ece8365859eb4cfe0d3ba543d68b154d26 \ No newline at end of file diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 88d87a2d8c2..881cb98e1d1 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -29,6 +29,8 @@ import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionResult; import com.amazonaws.services.ec2.model.AllocateAddressRequest; import com.amazonaws.services.ec2.model.AllocateAddressResult; +import com.amazonaws.services.ec2.model.AllocateHostsRequest; +import com.amazonaws.services.ec2.model.AllocateHostsResult; import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesRequest; import com.amazonaws.services.ec2.model.AssociateAddressRequest; import com.amazonaws.services.ec2.model.AssociateAddressResult; @@ -80,6 +82,8 @@ import com.amazonaws.services.ec2.model.CreateInternetGatewayRequest; import com.amazonaws.services.ec2.model.CreateInternetGatewayResult; import com.amazonaws.services.ec2.model.CreateKeyPairRequest; import com.amazonaws.services.ec2.model.CreateKeyPairResult; +import com.amazonaws.services.ec2.model.CreateNatGatewayRequest; +import com.amazonaws.services.ec2.model.CreateNatGatewayResult; import com.amazonaws.services.ec2.model.CreateNetworkAclEntryRequest; import com.amazonaws.services.ec2.model.CreateNetworkAclRequest; import com.amazonaws.services.ec2.model.CreateNetworkAclResult; @@ -120,6 +124,8 @@ import com.amazonaws.services.ec2.model.DeleteFlowLogsRequest; import com.amazonaws.services.ec2.model.DeleteFlowLogsResult; import com.amazonaws.services.ec2.model.DeleteInternetGatewayRequest; import com.amazonaws.services.ec2.model.DeleteKeyPairRequest; +import com.amazonaws.services.ec2.model.DeleteNatGatewayRequest; +import com.amazonaws.services.ec2.model.DeleteNatGatewayResult; import com.amazonaws.services.ec2.model.DeleteNetworkAclEntryRequest; import com.amazonaws.services.ec2.model.DeleteNetworkAclRequest; import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceRequest; @@ -161,6 +167,10 @@ import com.amazonaws.services.ec2.model.DescribeExportTasksRequest; import com.amazonaws.services.ec2.model.DescribeExportTasksResult; import com.amazonaws.services.ec2.model.DescribeFlowLogsRequest; import com.amazonaws.services.ec2.model.DescribeFlowLogsResult; +import com.amazonaws.services.ec2.model.DescribeHostsRequest; +import com.amazonaws.services.ec2.model.DescribeHostsResult; +import com.amazonaws.services.ec2.model.DescribeIdFormatRequest; +import com.amazonaws.services.ec2.model.DescribeIdFormatResult; import com.amazonaws.services.ec2.model.DescribeImageAttributeRequest; import com.amazonaws.services.ec2.model.DescribeImageAttributeResult; import com.amazonaws.services.ec2.model.DescribeImagesRequest; @@ -181,6 +191,8 @@ import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest; import com.amazonaws.services.ec2.model.DescribeKeyPairsResult; import com.amazonaws.services.ec2.model.DescribeMovingAddressesRequest; import 
com.amazonaws.services.ec2.model.DescribeMovingAddressesResult; +import com.amazonaws.services.ec2.model.DescribeNatGatewaysRequest; +import com.amazonaws.services.ec2.model.DescribeNatGatewaysResult; import com.amazonaws.services.ec2.model.DescribeNetworkAclsRequest; import com.amazonaws.services.ec2.model.DescribeNetworkAclsResult; import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeRequest; @@ -203,6 +215,10 @@ import com.amazonaws.services.ec2.model.DescribeReservedInstancesRequest; import com.amazonaws.services.ec2.model.DescribeReservedInstancesResult; import com.amazonaws.services.ec2.model.DescribeRouteTablesRequest; import com.amazonaws.services.ec2.model.DescribeRouteTablesResult; +import com.amazonaws.services.ec2.model.DescribeScheduledInstanceAvailabilityRequest; +import com.amazonaws.services.ec2.model.DescribeScheduledInstanceAvailabilityResult; +import com.amazonaws.services.ec2.model.DescribeScheduledInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeScheduledInstancesResult; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeRequest; @@ -233,6 +249,8 @@ import com.amazonaws.services.ec2.model.DescribeVolumesRequest; import com.amazonaws.services.ec2.model.DescribeVolumesResult; import com.amazonaws.services.ec2.model.DescribeVpcAttributeRequest; import com.amazonaws.services.ec2.model.DescribeVpcAttributeResult; +import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkDnsSupportRequest; +import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkDnsSupportResult; import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkRequest; import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkResult; import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesRequest; @@ -255,6 +273,8 @@ import com.amazonaws.services.ec2.model.DetachVolumeRequest; import com.amazonaws.services.ec2.model.DetachVolumeResult; import com.amazonaws.services.ec2.model.DetachVpnGatewayRequest; import com.amazonaws.services.ec2.model.DisableVgwRoutePropagationRequest; +import com.amazonaws.services.ec2.model.DisableVpcClassicLinkDnsSupportRequest; +import com.amazonaws.services.ec2.model.DisableVpcClassicLinkDnsSupportResult; import com.amazonaws.services.ec2.model.DisableVpcClassicLinkRequest; import com.amazonaws.services.ec2.model.DisableVpcClassicLinkResult; import com.amazonaws.services.ec2.model.DisassociateAddressRequest; @@ -263,6 +283,8 @@ import com.amazonaws.services.ec2.model.DryRunResult; import com.amazonaws.services.ec2.model.DryRunSupportedRequest; import com.amazonaws.services.ec2.model.EnableVgwRoutePropagationRequest; import com.amazonaws.services.ec2.model.EnableVolumeIORequest; +import com.amazonaws.services.ec2.model.EnableVpcClassicLinkDnsSupportRequest; +import com.amazonaws.services.ec2.model.EnableVpcClassicLinkDnsSupportResult; import com.amazonaws.services.ec2.model.EnableVpcClassicLinkRequest; import com.amazonaws.services.ec2.model.EnableVpcClassicLinkResult; import com.amazonaws.services.ec2.model.Filter; @@ -283,8 +305,13 @@ import com.amazonaws.services.ec2.model.ImportVolumeResult; import com.amazonaws.services.ec2.model.Instance; import com.amazonaws.services.ec2.model.InstanceState; import com.amazonaws.services.ec2.model.InstanceStateName; +import com.amazonaws.services.ec2.model.ModifyHostsRequest; +import 
com.amazonaws.services.ec2.model.ModifyHostsResult; +import com.amazonaws.services.ec2.model.ModifyIdFormatRequest; import com.amazonaws.services.ec2.model.ModifyImageAttributeRequest; import com.amazonaws.services.ec2.model.ModifyInstanceAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyInstancePlacementRequest; +import com.amazonaws.services.ec2.model.ModifyInstancePlacementResult; import com.amazonaws.services.ec2.model.ModifyNetworkInterfaceAttributeRequest; import com.amazonaws.services.ec2.model.ModifyReservedInstancesRequest; import com.amazonaws.services.ec2.model.ModifyReservedInstancesResult; @@ -302,12 +329,16 @@ import com.amazonaws.services.ec2.model.MoveAddressToVpcRequest; import com.amazonaws.services.ec2.model.MoveAddressToVpcResult; import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingRequest; import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingResult; +import com.amazonaws.services.ec2.model.PurchaseScheduledInstancesRequest; +import com.amazonaws.services.ec2.model.PurchaseScheduledInstancesResult; import com.amazonaws.services.ec2.model.RebootInstancesRequest; import com.amazonaws.services.ec2.model.RegisterImageRequest; import com.amazonaws.services.ec2.model.RegisterImageResult; import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionRequest; import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionResult; import com.amazonaws.services.ec2.model.ReleaseAddressRequest; +import com.amazonaws.services.ec2.model.ReleaseHostsRequest; +import com.amazonaws.services.ec2.model.ReleaseHostsResult; import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationRequest; import com.amazonaws.services.ec2.model.ReplaceNetworkAclAssociationResult; import com.amazonaws.services.ec2.model.ReplaceNetworkAclEntryRequest; @@ -330,6 +361,8 @@ import com.amazonaws.services.ec2.model.RevokeSecurityGroupEgressRequest; import com.amazonaws.services.ec2.model.RevokeSecurityGroupIngressRequest; import com.amazonaws.services.ec2.model.RunInstancesRequest; import com.amazonaws.services.ec2.model.RunInstancesResult; +import com.amazonaws.services.ec2.model.RunScheduledInstancesRequest; +import com.amazonaws.services.ec2.model.RunScheduledInstancesResult; import com.amazonaws.services.ec2.model.StartInstancesRequest; import com.amazonaws.services.ec2.model.StartInstancesResult; import com.amazonaws.services.ec2.model.StopInstancesRequest; @@ -519,6 +552,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DeleteNatGatewayResult deleteNatGateway(DeleteNatGatewayRequest deleteNatGatewayRequest) { + return null; + } + @Override public UnmonitorInstancesResult unmonitorInstances(UnmonitorInstancesRequest unmonitorInstancesRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -639,6 +677,16 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public ModifyHostsResult modifyHosts(ModifyHostsRequest modifyHostsRequest) { + return null; + } + + @Override + public void modifyIdFormat(ModifyIdFormatRequest modifyIdFormatRequest) { + + } + @Override public DescribeSecurityGroupsResult describeSecurityGroups(DescribeSecurityGroupsRequest describeSecurityGroupsRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in 
mock"); @@ -964,6 +1012,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DisableVpcClassicLinkDnsSupportResult disableVpcClassicLinkDnsSupport(DisableVpcClassicLinkDnsSupportRequest disableVpcClassicLinkDnsSupportRequest) { + return null; + } + @Override public DescribeInstanceAttributeResult describeInstanceAttribute(DescribeInstanceAttributeRequest describeInstanceAttributeRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -989,6 +1042,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public RunScheduledInstancesResult runScheduledInstances(RunScheduledInstancesRequest runScheduledInstancesRequest) { + return null; + } + @Override public DescribeSubnetsResult describeSubnets(DescribeSubnetsRequest describeSubnetsRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1024,6 +1082,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public ModifyInstancePlacementResult modifyInstancePlacement(ModifyInstancePlacementRequest modifyInstancePlacementRequest) { + return null; + } + @Override public CancelReservedInstancesListingResult cancelReservedInstancesListing(CancelReservedInstancesListingRequest cancelReservedInstancesListingRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1159,6 +1222,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public PurchaseScheduledInstancesResult purchaseScheduledInstances(PurchaseScheduledInstancesRequest purchaseScheduledInstancesRequest) { + return null; + } + @Override public void modifySnapshotAttribute(ModifySnapshotAttributeRequest modifySnapshotAttributeRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1334,6 +1402,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public ReleaseHostsResult releaseHosts(ReleaseHostsRequest releaseHostsRequest) { + return null; + } + @Override public void resetInstanceAttribute(ResetInstanceAttributeRequest resetInstanceAttributeRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1344,6 +1417,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public CreateNatGatewayResult createNatGateway(CreateNatGatewayRequest createNatGatewayRequest) { + return null; + } + @Override public void replaceNetworkAclEntry(ReplaceNetworkAclEntryRequest replaceNetworkAclEntryRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1374,6 +1452,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public EnableVpcClassicLinkDnsSupportResult enableVpcClassicLinkDnsSupport(EnableVpcClassicLinkDnsSupportRequest enableVpcClassicLinkDnsSupportRequest) { + return 
null; + } + @Override public void createVpnConnectionRoute(CreateVpnConnectionRouteRequest createVpnConnectionRouteRequest) throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1489,6 +1572,16 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DescribeScheduledInstanceAvailabilityResult describeScheduledInstanceAvailability(DescribeScheduledInstanceAvailabilityRequest describeScheduledInstanceAvailabilityRequest) { + return null; + } + + @Override + public DescribeScheduledInstancesResult describeScheduledInstances(DescribeScheduledInstancesRequest describeScheduledInstancesRequest) { + return null; + } + @Override public DescribeDhcpOptionsResult describeDhcpOptions() throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1529,6 +1622,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DescribeNatGatewaysResult describeNatGateways(DescribeNatGatewaysRequest describeNatGatewaysRequest) { + return null; + } + @Override public DescribeConversionTasksResult describeConversionTasks() throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1544,6 +1642,26 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DescribeHostsResult describeHosts(DescribeHostsRequest describeHostsRequest) { + return null; + } + + @Override + public DescribeHostsResult describeHosts() { + return null; + } + + @Override + public DescribeIdFormatResult describeIdFormat(DescribeIdFormatRequest describeIdFormatRequest) { + return null; + } + + @Override + public DescribeIdFormatResult describeIdFormat() { + return null; + } + @Override public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1584,6 +1702,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public DescribeVpcClassicLinkDnsSupportResult describeVpcClassicLinkDnsSupport(DescribeVpcClassicLinkDnsSupportRequest describeVpcClassicLinkDnsSupportRequest) { + return null; + } + @Override public DescribeClassicLinkInstancesResult describeClassicLinkInstances() throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); @@ -1649,6 +1772,11 @@ public class AmazonEC2Mock implements AmazonEC2 { throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public AllocateHostsResult allocateHosts(AllocateHostsRequest allocateHostsRequest) { + return null; + } + @Override public DescribeSnapshotsResult describeSnapshots() throws AmazonServiceException, AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index a083309891e..d01ebd29c12 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'aws': '1.10.33' + 'aws': '1.10.69' ] dependencies { @@ -49,11 +49,6 @@ test { } thirdPartyAudit.excludes = 
[ - // uses internal java api: com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl - // uses internal java api: com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault - // uses internal java api: com.sun.org.apache.xpath.internal.XPathContext - 'com.amazonaws.util.XpathUtils', - // classes are missing 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', @@ -61,3 +56,6 @@ thirdPartyAudit.excludes = [ 'org.apache.log.Hierarchy', 'org.apache.log.Logger', ] + +// AWS SDK is exposing some deprecated methods which we call using a delegate +compileTestJava.options.compilerArgs << "-Xlint:-deprecation" diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.33.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.33.jar.sha1 deleted file mode 100644 index 332a8f01035..00000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.33.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fabedbbe2b834b1add150b6a38395c5ef7380168 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.69.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.69.jar.sha1 new file mode 100644 index 00000000000..2971a33d7d9 --- /dev/null +++ b/plugins/repository-s3/licenses/aws-java-sdk-core-1.10.69.jar.sha1 @@ -0,0 +1 @@ +a1f02d5f26ba1d8c37e2bf9c847db3c6729dda00 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.33.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.33.jar.sha1 deleted file mode 100644 index 0d7ab9f8381..00000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.33.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35881245894ecc4d893c074eacdf2e6b56820fda \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.69.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.69.jar.sha1 new file mode 100644 index 00000000000..22e1f924664 --- /dev/null +++ b/plugins/repository-s3/licenses/aws-java-sdk-kms-1.10.69.jar.sha1 @@ -0,0 +1 @@ +ed74ff3872193b4704a751f0b72ab2cf0db0651b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.33.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.33.jar.sha1 deleted file mode 100644 index 3328f01c658..00000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.33.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5665cf77102a932a16e99ebf41d197e03ddbf25c \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.69.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.69.jar.sha1 new file mode 100644 index 00000000000..64e7336c2bc --- /dev/null +++ b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.10.69.jar.sha1 @@ -0,0 +1 @@ +6fa48bf0bff43f26436956b88d8d3764b6cf109e \ No newline at end of file diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java index 97829f9d689..7cda9ee0947 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AmazonS3Wrapper.java @@ -50,6 +50,7 @@ import com.amazonaws.services.s3.model.CreateBucketRequest; import com.amazonaws.services.s3.model.DeleteBucketCrossOriginConfigurationRequest; import com.amazonaws.services.s3.model.DeleteBucketLifecycleConfigurationRequest; import 
com.amazonaws.services.s3.model.DeleteBucketPolicyRequest; +import com.amazonaws.services.s3.model.DeleteBucketReplicationConfigurationRequest; import com.amazonaws.services.s3.model.DeleteBucketRequest; import com.amazonaws.services.s3.model.DeleteBucketTaggingConfigurationRequest; import com.amazonaws.services.s3.model.DeleteBucketWebsiteConfigurationRequest; @@ -69,6 +70,7 @@ import com.amazonaws.services.s3.model.GetBucketReplicationConfigurationRequest; import com.amazonaws.services.s3.model.GetBucketTaggingConfigurationRequest; import com.amazonaws.services.s3.model.GetBucketVersioningConfigurationRequest; import com.amazonaws.services.s3.model.GetBucketWebsiteConfigurationRequest; +import com.amazonaws.services.s3.model.GetObjectAclRequest; import com.amazonaws.services.s3.model.GetObjectMetadataRequest; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.GetS3AccountOwnerRequest; @@ -253,6 +255,11 @@ public class AmazonS3Wrapper implements AmazonS3 { return delegate.getObjectAcl(bucketName, key, versionId); } + @Override + public AccessControlList getObjectAcl(GetObjectAclRequest getObjectAclRequest) throws AmazonClientException, AmazonServiceException { + return delegate.getObjectAcl(getObjectAclRequest); + } + @Override public void setObjectAcl(String bucketName, String key, AccessControlList acl) throws AmazonClientException, AmazonServiceException { delegate.setObjectAcl(bucketName, key, acl); @@ -358,6 +365,17 @@ public class AmazonS3Wrapper implements AmazonS3 { delegate.deleteBucketReplicationConfiguration(bucketName); } + @Override + public void deleteBucketReplicationConfiguration(DeleteBucketReplicationConfigurationRequest request) throws AmazonServiceException, + AmazonClientException { + delegate.deleteBucketReplicationConfiguration(request); + } + + @Override + public boolean doesObjectExist(String bucketName, String objectName) throws AmazonServiceException, AmazonClientException { + return delegate.doesObjectExist(bucketName, objectName); + } + @Override public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException { return delegate.putObject(putObjectRequest); From 44080a007f08a90d7eb266429a3826b074ab88bf Mon Sep 17 00:00:00 2001 From: David Pilato Date: Fri, 15 Apr 2016 14:52:27 +0200 Subject: [PATCH 0004/1311] Add cloud.aws.s3.throttle_retries setting Defaults to `true`. If this option causes trouble, you can disable it with `cloud.aws.s3.throttle_retries: false` in the `elasticsearch.yml` file. --- .../main/java/org/elasticsearch/cloud/aws/AwsS3Service.java | 4 ++++ .../org/elasticsearch/cloud/aws/InternalAwsS3Service.java | 1 + .../plugin/repository/s3/S3RepositoryPlugin.java | 1 + 3 files changed, 6 insertions(+) diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 427c454fa28..383be6825c2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -151,6 +151,10 @@ public interface AwsS3Service extends LifecycleComponent { * cloud.aws.s3.endpoint: Endpoint. If not set, endpoint will be guessed based on region setting. 
*/ Setting ENDPOINT_SETTING = Setting.simpleString("cloud.aws.s3.endpoint", Property.NodeScope); + /** + * cloud.aws.s3.throttle_retries: Set to `true` if you want to throttle retries. Defaults to `true`. + */ + Setting THROTTLE_RETRIES_SETTING = Setting.boolSetting("cloud.aws.s3.throttle_retries", true, Property.NodeScope); } AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 81b6463a746..67fae30657d 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -101,6 +101,7 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent Date: Wed, 20 Apr 2016 19:10:01 +0300 Subject: [PATCH 0005/1311] Row-centric output for _cat/fielddata --- .../rest/action/cat/RestFielddataAction.java | 63 +++++-------------- docs/reference/cat/fielddata.asciidoc | 33 +++++----- .../test/cat.fielddata/10_basic.yaml | 37 ++++++----- 3 files changed, 54 insertions(+), 79 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java index 46e75ae7de5..8febf04e892 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java @@ -19,8 +19,7 @@ package org.elasticsearch.rest.action.cat; -import com.carrotsearch.hppc.ObjectLongHashMap; -import com.carrotsearch.hppc.ObjectLongMap; +import com.carrotsearch.hppc.cursors.ObjectLongCursor; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -36,11 +35,6 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import static org.elasticsearch.rest.RestRequest.Method.GET; /** @@ -57,7 +51,6 @@ public class RestFielddataAction extends AbstractCatAction { @Override protected void doRequest(final RestRequest request, final RestChannel channel, final Client client) { - final NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("data:true"); nodesStatsRequest.clear(); nodesStatsRequest.indices(true); @@ -86,56 +79,30 @@ public class RestFielddataAction extends AbstractCatAction { .addCell("host", "alias:h;desc:host name") .addCell("ip", "desc:ip address") .addCell("node", "alias:n;desc:node name") - .addCell("total", "text-align:right;desc:total field data usage") + .addCell("field", "alias:f;desc:field name") + .addCell("size", "text-align:right;alias:s;desc:field data usage") .endHeaders(); return table; } private Table buildTable(final RestRequest request, final NodesStatsResponse nodeStatses) { - Set fieldNames = new HashSet<>(); - Map> nodesFields = new HashMap<>(); + Table table = getTableWithHeader(request); - // Collect all the field names so a new table can be built - for (NodeStats ns : nodeStatses.getNodes()) { - ObjectLongHashMap fields = 
ns.getIndices().getFieldData().getFields(); - nodesFields.put(ns, fields); - if (fields != null) { - for (String key : fields.keys().toArray(String.class)) { - fieldNames.add(key); + for (NodeStats nodeStats: nodeStatses.getNodes()) { + if (nodeStats.getIndices().getFieldData().getFields() != null) { + for (ObjectLongCursor cursor : nodeStats.getIndices().getFieldData().getFields()) { + table.startRow(); + table.addCell(nodeStats.getNode().getId()); + table.addCell(nodeStats.getNode().getHostName()); + table.addCell(nodeStats.getNode().getHostAddress()); + table.addCell(nodeStats.getNode().getName()); + table.addCell(cursor.key); + table.addCell(new ByteSizeValue(cursor.value)); + table.endRow(); } } } - // The table must be rebuilt because it has dynamic headers based on the fields - Table table = new Table(); - table.startHeaders() - .addCell("id", "desc:node id") - .addCell("host", "alias:h;desc:host name") - .addCell("ip", "desc:ip address") - .addCell("node", "alias:n;desc:node name") - .addCell("total", "text-align:right;desc:total field data usage"); - // The table columns must be built dynamically since the number of fields is unknown - for (String fieldName : fieldNames) { - table.addCell(fieldName, "text-align:right;desc:" + fieldName + " field"); - } - table.endHeaders(); - - for (Map.Entry> statsEntry : nodesFields.entrySet()) { - table.startRow(); - // add the node info and field data total before each individual field - NodeStats ns = statsEntry.getKey(); - table.addCell(ns.getNode().getId()); - table.addCell(ns.getNode().getHostName()); - table.addCell(ns.getNode().getHostAddress()); - table.addCell(ns.getNode().getName()); - table.addCell(ns.getIndices().getFieldData().getMemorySize()); - ObjectLongMap fields = statsEntry.getValue(); - for (String fieldName : fieldNames) { - table.addCell(new ByteSizeValue(fields == null ? 0L : fields.getOrDefault(fieldName, 0L))); - } - table.endRow(); - } - return table; } } diff --git a/docs/reference/cat/fielddata.asciidoc b/docs/reference/cat/fielddata.asciidoc index 250d0b39eaf..854853e4d39 100644 --- a/docs/reference/cat/fielddata.asciidoc +++ b/docs/reference/cat/fielddata.asciidoc @@ -7,10 +7,13 @@ on every data node in the cluster. 
[source,sh] -------------------------------------------------- % curl '192.168.56.10:9200/_cat/fielddata?v' -id host ip node total body text -c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb 225.7kb -waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb 275.3kb -yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb 175.3kb +id host ip node field size +c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb +c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones text 225.7kb +waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb +waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary text 275.3kb +yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb +yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip text 175.3kb -------------------------------------------------- Fields can be specified either as a query parameter, or in the URL path: @@ -18,17 +21,19 @@ [source,sh] -------------------------------------------------- % curl '192.168.56.10:9200/_cat/fielddata?v&fields=body' -id host ip node total body -c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb -waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb -yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb +id host ip node field size +c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb +waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb +yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb % curl '192.168.56.10:9200/_cat/fielddata/body,text?v' -id host ip node total body text -c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones 385.6kb 159.8kb 225.7kb -waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary 435.2kb 159.8kb 275.3kb -yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip 284.6kb 109.2kb 175.3kb +id host ip node field size +c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones body 159.8kb +c223lARiSGeezlbrcugAYQ myhost1 10.20.100.200 Jessica Jones text 225.7kb +waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary body 159.8kb +waPCbitNQaCL6xC8VxjAwg myhost2 10.20.100.201 Adversary text 275.3kb +yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip body 109.2kb +yaDkp-G3R0q1AJ-HUEvkSQ myhost3 10.20.100.202 Microchip text 175.3kb -------------------------------------------------- -The output shows the total fielddata and then the individual fielddata for the -`body` and `text` fields. +The output shows the individual fielddata for the `body` and `text` fields, one row per field per node. 
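The row-centric rewrite of RestFielddataAction above replaces the old two-pass approach (collect every field name, then rebuild a table with dynamic columns) with a single pass over hppc's cursor iteration. A self-contained sketch of that iteration pattern, assuming only the hppc library on the classpath; the field names and byte counts are made up for illustration:

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;

public class FielddataRowsSketch {
    public static void main(String[] args) {
        // stand-in for NodeStats#getIndices().getFieldData().getFields()
        ObjectLongHashMap<String> fields = new ObjectLongHashMap<>();
        fields.put("body", 163635L); // fielddata bytes held for the field
        fields.put("text", 231116L);

        // hppc containers iterate as reusable cursors rather than Map.Entry
        // objects; cursor.key is the field name, cursor.value its footprint
        for (ObjectLongCursor<String> cursor : fields) {
            System.out.println(cursor.key + " " + cursor.value + "b");
        }
    }
}
--------------------------------------------------

Emitting one table row per cursor is what allows the fixed `field`/`size` header; the deleted code had to rebuild its columns from the union of all field names across nodes.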
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml index cd3a53a103c..363ff9b477c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml @@ -10,7 +10,8 @@ host .+ \n ip .+ \n node .+ \n - total .+ \n + field .+ \n + size .+ \n $/ --- @@ -38,39 +39,41 @@ type: type body: { foo: bar } refresh: true + - do: search: index: index body: query: { match_all: {} } sort: foo + - do: cat.fielddata: - h: total + h: field,size v: true - match: $body: | /^ field \s+ size \n foo \s+ (\d+(\.\d+)?[gmk]?b \n)+ $/ - do: cat.fielddata: - h: total,foo - v: true - - - match: - $body: | - /^ total \s+ foo \n - (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/ - - - do: - cat.fielddata: - h: total,foo + h: field,size fields: notfoo,foo v: true - match: $body: | - /^ total \s+ foo \n - (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/ + /^ field \s+ size \n + foo \s+ (\d+(\.\d+)?[gmk]?b \n)+ $/ + + - do: + cat.fielddata: + h: field,size + fields: notfoo + v: true + + - match: + $body: | + /^ field \s+ size \n $/ From 6ef81c5dcdad0a65c350379dd12daadcc7ffaae5 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 27 Apr 2016 14:11:17 +0200 Subject: [PATCH 0006/1311] S3 repositories credentials should be filtered While reading the code as part of #18008, I noticed that we no longer filter `repositories.s3.access_key` and `repositories.s3.secret_key`. Also fixed a typo in a REST test --- .../java/org/elasticsearch/repositories/s3/S3Repository.java | 4 ++-- .../rest-api-spec/test/repository_s3/20_repository.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index a09d57ebc93..8fef5f8e893 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -67,13 +67,13 @@ public class S3Repository extends BlobStoreRepository { * @see CLOUD_S3#KEY_SETTING */ Setting<String> KEY_SETTING = - new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), Property.NodeScope); + new Setting<>("repositories.s3.access_key", CLOUD_S3.KEY_SETTING, Function.identity(), Property.NodeScope, Property.Filtered); /** * repositories.s3.secret_key: AWS Secret key specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.secret_key. * @see CLOUD_S3#SECRET_SETTING */ Setting<String> SECRET_SETTING = - new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), Property.NodeScope); + new Setting<>("repositories.s3.secret_key", CLOUD_S3.SECRET_SETTING, Function.identity(), Property.NodeScope, Property.Filtered); /** * repositories.s3.region: Region specific for all S3 Repositories API calls. Defaults to cloud.aws.s3.region. 
* @see CLOUD_S3#REGION_SETTING diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository.yaml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository.yaml index 34384653a74..eb909f91f1d 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository.yaml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository.yaml @@ -1,6 +1,6 @@ # Integration tests for Repository S3 component # -"S3 repository can be registereed": +"S3 repository can be registered": - do: snapshot.create_repository: repository: test_repo_s3_1 From 78ff4f52d605c109efd60039b9ef600a072c24ff Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 12:10:56 +0200 Subject: [PATCH 0007/1311] Introduces GeoValidationMethod to GeoDistanceSortBuilder Previously, like in other geo-related query parsers, we used a combination of two booleans for coerce and ignore_malformed, which was error-prone and not very clear. Switched to using GeoValidationMethod instead, as we already do e.g. in GeoBoundingBoxQueryBuilder. Support for both coerce and ignore_malformed is kept in the parser, but both are deprecated in favour of validation_method. Introduced the same deprecation in GeoBoundingBoxQueryBuilder. --- .../query/GeoBoundingBoxQueryBuilder.java | 6 +- .../search/sort/GeoDistanceSortBuilder.java | 102 +++++++++--------- .../search/sort/GeoDistanceSortBuilderIT.java | 3 +- .../sort/GeoDistanceSortBuilderTests.java | 74 ++++++++++--- 4 files changed, 114 insertions(+), 71 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index db743b971c7..7f5d5e2ec8d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -64,10 +64,12 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "_geo_distance"; public static final String ALTERNATIVE_NAME = "_geoDistance"; - public static final boolean DEFAULT_COERCE = false; - public static final boolean DEFAULT_IGNORE_MALFORMED = false; - public static final ParseField UNIT_FIELD = new ParseField("unit"); - public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); - public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize"); - public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed"); - public static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode"); - public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path"); - public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter"); - public static final ParseField REVERSE_FORBIDDEN = new ParseField("reverse"); + public static final GeoValidationMethod DEFAULT_VALIDATION = GeoValidationMethod.DEFAULT; + + private static final ParseField UNIT_FIELD = new ParseField("unit"); + private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); + private static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method"); + private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed") + .withAllDeprecated("use validation_method instead"); + private static final ParseField 
COERCE_FIELD = new ParseField("coerce", "normalize") + .withAllDeprecated("use validation_method instead"); + private static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode"); + private static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path"); + private static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter"); private final String fieldName; private final List points = new ArrayList<>(); @@ -87,9 +90,7 @@ public class GeoDistanceSortBuilder extends SortBuilder private QueryBuilder nestedFilter; private String nestedPath; - // TODO switch to GeoValidationMethod enum - private boolean coerce = DEFAULT_COERCE; - private boolean ignoreMalformed = DEFAULT_IGNORE_MALFORMED; + private GeoValidationMethod validation = DEFAULT_VALIDATION; /** * Constructs a new distance based sort on a geo point like field. @@ -144,8 +145,7 @@ public class GeoDistanceSortBuilder extends SortBuilder this.sortMode = original.sortMode; this.nestedFilter = original.nestedFilter; this.nestedPath = original.nestedPath; - this.coerce = original.coerce; - this.ignoreMalformed = original.ignoreMalformed; + this.validation = original.validation; } /** @@ -161,8 +161,7 @@ public class GeoDistanceSortBuilder extends SortBuilder sortMode = in.readOptionalWriteable(SortMode::readFromStream); nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nestedPath = in.readOptionalString(); - coerce = in.readBoolean(); - ignoreMalformed =in.readBoolean(); + validation = GeoValidationMethod.readFromStream(in); } @Override @@ -175,8 +174,7 @@ public class GeoDistanceSortBuilder extends SortBuilder out.writeOptionalWriteable(sortMode); out.writeOptionalNamedWriteable(nestedFilter); out.writeOptionalString(nestedPath); - out.writeBoolean(coerce); - out.writeBoolean(ignoreMalformed); + validation.writeTo(out); } /** @@ -257,6 +255,21 @@ public class GeoDistanceSortBuilder extends SortBuilder return this.unit; } + /** + * Sets validation method for this sort builder. + */ + public GeoDistanceSortBuilder validation(GeoValidationMethod method) { + this.validation = method; + return this; + } + + /** + * Returns the validation method to use for this sort builder. + */ + public GeoValidationMethod validation() { + return validation; + } + /** * Defines which distance to use for sorting in the case a document contains multiple geo points. 
* Possible values: min and max @@ -309,26 +322,6 @@ public class GeoDistanceSortBuilder extends SortBuilder return this.nestedPath; } - public GeoDistanceSortBuilder coerce(boolean coerce) { - this.coerce = coerce; - return this; - } - - public boolean coerce() { - return this.coerce; - } - - public GeoDistanceSortBuilder ignoreMalformed(boolean ignoreMalformed) { - if (coerce == false) { - this.ignoreMalformed = ignoreMalformed; - } - return this; - } - - public boolean ignoreMalformed() { - return this.ignoreMalformed; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -354,8 +347,7 @@ public class GeoDistanceSortBuilder extends SortBuilder if (nestedFilter != null) { builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params); } - builder.field(COERCE_FIELD.getPreferredName(), coerce); - builder.field(IGNORE_MALFORMED_FIELD.getPreferredName(), ignoreMalformed); + builder.field(VALIDATION_METHOD_FIELD.getPreferredName(), validation); builder.endObject(); builder.endObject(); @@ -386,14 +378,14 @@ public class GeoDistanceSortBuilder extends SortBuilder Objects.equals(order, other.order) && Objects.equals(nestedFilter, other.nestedFilter) && Objects.equals(nestedPath, other.nestedPath) && - Objects.equals(coerce, other.coerce) && - Objects.equals(ignoreMalformed, other.ignoreMalformed); + Objects.equals(validation, other.validation); } @Override public int hashCode() { return Objects.hash(this.fieldName, this.points, this.geoDistance, - this.unit, this.sortMode, this.order, this.nestedFilter, this.nestedPath, this.coerce, this.ignoreMalformed); + this.unit, this.sortMode, this.order, this.nestedFilter, + this.nestedPath, this.validation); } /** @@ -417,8 +409,9 @@ public class GeoDistanceSortBuilder extends SortBuilder QueryBuilder nestedFilter = null; String nestedPath = null; - boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE; - boolean ignoreMalformed = GeoDistanceSortBuilder.DEFAULT_IGNORE_MALFORMED; + boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + GeoValidationMethod validation = null; XContentParser.Token token; String currentName = parser.currentName(); @@ -463,6 +456,8 @@ public class GeoDistanceSortBuilder extends SortBuilder if (coerce == false) { ignoreMalformed = ignore_malformed_value; } + } else if (parseFieldMatcher.match(currentName, VALIDATION_METHOD_FIELD)) { + validation = GeoValidationMethod.fromString(parser.text()); } else if (parseFieldMatcher.match(currentName, SORTMODE_FIELD)) { sortMode = SortMode.fromString(parser.text()); } else if (parseFieldMatcher.match(currentName, NESTED_PATH_FIELD)) { @@ -498,8 +493,13 @@ public class GeoDistanceSortBuilder extends SortBuilder } result.setNestedFilter(nestedFilter); result.setNestedPath(nestedPath); - result.coerce(coerce); - result.ignoreMalformed(ignoreMalformed); + if (validation == null) { + // looks like either validation was left unset or we are parsing old validation json + result.validation(GeoValidationMethod.infer(coerce, ignoreMalformed)); + } else { + // ignore deprecated coerce/ignore_malformed + result.validation(validation); + } return result; } @@ -512,7 +512,7 @@ public class GeoDistanceSortBuilder extends SortBuilder localPoints.add(new GeoPoint(geoPoint)); } - if (!indexCreatedBeforeV2_0 && !ignoreMalformed) { + if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validation)) { for (GeoPoint 
point : localPoints) { if (GeoUtils.isValidLatitude(point.lat()) == false) { throw new ElasticsearchParseException( @@ -529,9 +529,9 @@ public class GeoDistanceSortBuilder extends SortBuilder } } - if (coerce) { + if (GeoValidationMethod.isCoerce(validation)) { for (GeoPoint point : localPoints) { - GeoUtils.normalizePoint(point, coerce, coerce); + GeoUtils.normalizePoint(point, true, true); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 31c1d5793e0..5229c905bf8 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -314,7 +315,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { .setSource( new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0) .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE) - .ignoreMalformed(true).coerce(true))).execute().actionGet(); + .validation(GeoValidationMethod.COERCE))).execute().actionGet(); checkCorrectSortOrderForGeoSort(searchResponse); } diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 87fd183f1ce..f65050ca9ce 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.test.geo.RandomGeoGenerator; @@ -94,10 +95,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase GeoDistanceSortBuilder.fromXContent(context, "")); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + + } + + public void testIgnoreMalformedIsDeprecated() throws IOException { + String json = "{\n" + + " \"testname\" : [ {\n" + + " \"lat\" : -6.046997540714173,\n" + + " \"lon\" : -51.94128329747579\n" + + " } ],\n" + + " \"unit\" : \"m\",\n" + + " \"distance_type\" : \"sloppy_arc\",\n" + + " \"mode\" : \"SUM\",\n" + + " \"ignore_malformed\" : true\n" + + "}"; + XContentParser itemParser = XContentHelper.createParser(new BytesArray(json)); + itemParser.nextToken(); + + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(context, "")); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + + } public void testSortModeSumIsRejectedInJSON() 
throws IOException { String json = "{\n" + @@ -279,9 +322,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase Date: Thu, 28 Apr 2016 13:56:50 +0200 Subject: [PATCH 0008/1311] Deprecate coerce/ignore_malformed for GeoPolygonQueryBuilder Includes update to parsing code, tests, migration docs and reference docs. --- .../index/query/GeoPolygonQueryBuilder.java | 10 ++--- .../query/GeoPolygonQueryBuilderTests.java | 37 +++++++++++++++++-- .../migration/migrate_5_0/search.asciidoc | 1 + .../query-dsl/geo-polygon-query.asciidoc | 5 ++- 4 files changed, 43 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index d415c87b5ab..35ab98e25de 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -54,8 +54,10 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + + } + + public void testFromJsonCoerceDeprecated() throws IOException { + String json = + "{\n" + + " \"geo_polygon\" : {\n" + + " \"person.location\" : {\n" + + " \"points\" : [ [ -70.0, 40.0 ], [ -80.0, 30.0 ], [ -90.0, 20.0 ], [ -70.0, 40.0 ] ]\n" + + " },\n" + + " \"coerce\" : false,\n" + + " \"ignore_unmapped\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index 2d22ee1c1c3..a2c56996c2b 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -126,6 +126,7 @@ in favour of `query` and `no_match_query`. * The `exists` query will now fail if the `_field_names` field is disabled. +* Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoPolygonQuery. Use parameter validation_method instead. ==== Top level `filter` parameter diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index 938a0b3dc87..ef80de82079 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -34,8 +34,9 @@ points. Here is an example: |Option |Description |`_name` |Optional name field to identify the filter -|`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or -longitude (default is `false`). +|`validation_method` |Set to `IGNORE_MALFORMED` to accept geo points with +invalid latitude or longitude, `COERCE` to try and infer correct latitude +or longitude, or `STRICT` (default is `STRICT`). 
|======================================================================= [float] From 3160798084f61e4c613e1bf87f12708a270a8a6a Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:01:54 +0200 Subject: [PATCH 0009/1311] Deprecate coerce/ignore_malformed for GeoDistanceRangeQuery --- .../query/GeoDistanceRangeQueryBuilder.java | 6 ++- .../query/GeoDistanceRangeQueryTests.java | 42 +++++++++++++++++++ .../migration/migrate_5_0/search.asciidoc | 1 + 3 files changed, 47 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java index c23ee423a55..7afbecad4d3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java @@ -78,8 +78,10 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + + public void testFromJsonIgnoreMalformedFails() throws IOException { + String json = + "{\n" + + " \"geo_distance_range\" : {\n" + + " \"pin.location\" : [ -70.0, 40.0 ],\n" + + " \"from\" : \"200km\",\n" + + " \"to\" : \"400km\",\n" + + " \"include_lower\" : true,\n" + + " \"include_upper\" : true,\n" + + " \"unit\" : \"m\",\n" + + " \"distance_type\" : \"sloppy_arc\",\n" + + " \"optimize_bbox\" : \"memory\",\n" + + " \"ignore_malformed\" : true,\n" + + " \"ignore_unmapped\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index a2c56996c2b..58435256abb 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -128,6 +128,7 @@ in favour of `query` and `no_match_query`. * Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoPolygonQuery. Use parameter validation_method instead. +* Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoDistanceRangeQuery. Use parameter validation_method instead. 
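All of the geo deprecations in this series rely on the same two pieces: ParseField#withAllDeprecated, which makes strict parsing fail with "Deprecated field ..." (exactly what the new *Deprecated tests assert), and GeoValidationMethod.infer(coerce, ignoreMalformed), which folds the legacy booleans into the new enum. A hedged, self-contained sketch of that mapping; the real enum lives in org.elasticsearch.index.query and its internals may differ in detail:

[source,java]
--------------------------------------------------
// hypothetical stand-in for org.elasticsearch.index.query.GeoValidationMethod,
// shown only to illustrate the boolean-to-enum mapping used by the parsers above
enum ValidationMethodSketch {
    COERCE, IGNORE_MALFORMED, STRICT;

    // mirrors the GeoValidationMethod.infer(coerce, ignoreMalformed) call sites:
    // coerce wins, since coercing implies accepting malformed points first
    static ValidationMethodSketch infer(boolean coerce, boolean ignoreMalformed) {
        if (coerce) {
            return COERCE;
        }
        return ignoreMalformed ? IGNORE_MALFORMED : STRICT;
    }

    public static void main(String[] args) {
        System.out.println(infer(false, false)); // STRICT: reject invalid points
        System.out.println(infer(false, true));  // IGNORE_MALFORMED: accept as-is
        System.out.println(infer(true, false));  // COERCE: accept and normalize
    }
}
--------------------------------------------------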
==== Top level `filter` parameter Removed support for the deprecated top level `filter` in the search api, From 3f743a30cfba4aa27de5a468bbbb920fa3a3b800 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:06:27 +0200 Subject: [PATCH 0010/1311] Deprecate coerce/ignore_malformed in GeoDistanceQueryBuilder --- .../index/query/GeoDistanceQueryBuilder.java | 6 ++-- .../query/GeoDistanceQueryBuilderTests.java | 34 +++++++++++++++++++ .../migration/migrate_5_0/search.asciidoc | 2 ++ .../query-dsl/geo-distance-query.asciidoc | 7 ++-- 4 files changed, 44 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index a422b6eb6f3..0f2b4694e27 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -73,8 +73,10 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + + public void testFromJsonIgnoreMalformedFails() throws IOException { + String json = + "{\n" + + " \"geo_distance\" : {\n" + + " \"pin.location\" : [ -70.0, 40.0 ],\n" + + " \"distance\" : 12000.0,\n" + + " \"distance_type\" : \"sloppy_arc\",\n" + + " \"optimize_bbox\" : \"memory\",\n" + + " \"ignore_malformed\" : true,\n" + + " \"ignore_unmapped\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index 58435256abb..f5efde6403d 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -129,6 +129,8 @@ in favour of `query` and `no_match_query`. * Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoPolygonQuery. Use parameter validation_method instead. * Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoDistanceRangeQuery. Use parameter validation_method instead. + +* Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoDistanceQuery. Use parameter validation_method instead. ==== Top level `filter` parameter Removed support for the deprecated top level `filter` in the search api, diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index f053fd3a2dd..2b80241dfc5 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -162,10 +162,11 @@ The following are options allowed on the filter: Optional name field to identify the query -`ignore_malformed`:: +`validation_method`:: - Set to `true` to accept geo points with invalid latitude or - longitude (default is `false`). + Set to `IGNORE_MALFORMED` to accept geo points with invalid latitude or + longitude, set to `COERCE` to additionally try and infer correct + coordinates (default is `STRICT`). 
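For callers of the Java API the migration is mechanical. A minimal sketch using the same fluent calls the GeoDistanceSortBuilderIT change above exercises; the field name and coordinates are placeholders:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.query.GeoValidationMethod;
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;

public class GeoSortMigrationSketch {
    // builds a lenient geo distance sort the post-deprecation way
    public static GeoDistanceSortBuilder lenientGeoSort() {
        return SortBuilders.geoDistanceSort("pin.location", 40.0, -70.0)
                .unit(DistanceUnit.KILOMETERS)
                // .coerce(true).ignoreMalformed(true) -- removed from the builder;
                // the single validation method below covers both
                .validation(GeoValidationMethod.COERCE);
    }
}
--------------------------------------------------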
[float] ==== geo_point Type From a19c426e0f354f3a91bdd98d36ded091fa619054 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:10:59 +0200 Subject: [PATCH 0011/1311] Deprecate coerce/ignore_malformed for GeoBoundingBoxQuery --- .../GeoBoundingBoxQueryBuilderTests.java | 36 +++++++++++++++++++ .../migration/migrate_5_0/search.asciidoc | 2 ++ .../query-dsl/geo-bounding-box-query.asciidoc | 5 +-- 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index 558d8b0e3f1..8ba48839aba 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -505,6 +505,42 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + + public void testFromJsonIgnoreMalformedFails() throws IOException { + String json = + "{\n" + + " \"geo_bounding_box\" : {\n" + + " \"pin.location\" : {\n" + + " \"top_left\" : [ -74.1, 40.73 ],\n" + + " \"bottom_right\" : [ -71.12, 40.01 ]\n" + + " },\n" + + " \"ignore_malformed\" : true,\n" + + " \"type\" : \"MEMORY\",\n" + + " \"ignore_unmapped\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); + assertTrue(e.getMessage().startsWith("Deprecated field ")); + } + @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index f5efde6403d..f1892573f1d 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -131,6 +131,8 @@ in favour of `query` and `no_match_query`. * Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoDistanceRangeQuery. Use parameter validation_method instead. * Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoDistanceQuery. Use parameter validation_method instead. + +* Deprecated support for the coerce, normalize, ignore_malformed parameters in GeoBoundingBoxQuery. Use parameter validation_method instead. ==== Top level `filter` parameter Removed support for the deprecated top level `filter` in the search api, diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index 593086f2a1a..2630e6cf048 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -52,8 +52,9 @@ Then the following simple query can be executed with a |Option |Description |`_name` |Optional name field to identify the filter -|`ignore_malformed` |Set to `true` to -accept geo points with invalid latitude or longitude (default is `false`). +|`validation_method` |Set to `IGNORE_MALFORMED` to +accept geo points with invalid latitude or longitude, set to +`COERCE` to also try to infer correct latitude or longitude. (default is `STRICT`). |`type` |Set to one of `indexed` or `memory` to defines whether this filter will be executed in memory or indexed. 
See <> below for further details From c1fa9cd18e221357f7c21708ee631db72bef503e Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:13:58 +0200 Subject: [PATCH 0012/1311] Add note that coerce and ignore_malformed are deprecated for geo distance sorting --- docs/reference/migration/migrate_5_0/search.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index f1892573f1d..dac80ad05ed 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -155,6 +155,8 @@ vectors don't support distributed document frequencies anymore. The `reverse` parameter has been removed, in favour of explicitly specifying the sort order with the `order` option. +The `coerce` and `ignore_malformed` parameters were deprecated in favour of `validation_method`. + ==== Inner hits * The format of top level inner hits has been changed to be more readable. All options are now set on the same level. From 3ab006e42272507202dfaa0f00a92ede7f06bbc6 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:27:22 +0200 Subject: [PATCH 0013/1311] Switch to new random value supplier for test. --- .../sort/GeoDistanceSortBuilderTests.java | 39 +++++-------------- .../org/elasticsearch/test/ESTestCase.java | 11 ++++++ 2 files changed, 20 insertions(+), 30 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index f65050ca9ce..77d04d6cae2 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; @@ -80,13 +81,13 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(DistanceUnit.values()))); } if (randomBoolean()) { result.order(RandomSortDataGenerator.order(null)); } if (randomBoolean()) { - result.sortMode(mode(result.sortMode())); + result.sortMode(ESTestCase.randomValueOtherThan(SortMode.SUM, () -> randomFrom(SortMode.values()))); } if (randomBoolean()) { result.setNestedFilter(RandomSortDataGenerator.nestedFilter(result.getNestedFilter())); @@ -95,7 +96,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(GeoValidationMethod.values()))); } return result; @@ -108,30 +109,6 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(DistanceUnit.values()))); break; case 4: result.order(RandomSortDataGenerator.order(original.order())); break; case 5: - result.sortMode(mode(original.sortMode())); + result.sortMode(ESTestCase.randomValueOtherThanMany( + Arrays.asList(SortMode.SUM, result.sortMode()), + () -> randomFrom(SortMode.values()))); break; case 6: result.setNestedFilter(RandomSortDataGenerator.nestedFilter(original.getNestedFilter())); @@ -185,7 +164,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(GeoValidationMethod.values()))); break; } return 
result; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 8bbc95fe2ad..d8a3221c88f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -417,6 +417,17 @@ public abstract class ESTestCase extends LuceneTestCase { return randomValue; } + /** + * helper to get a random value in a certain range that's different from the input + */ + public static T randomValueOtherThanMany(Collection input, Supplier randomSupplier) { + T randomValue = null; + do { + randomValue = randomSupplier.get(); + } while (input.contains(randomValue)); + return randomValue; + } + /** * Runs the code block for 10 seconds waiting for no assertion to trip. */ From 47fefdd273f7784e9c166eedb2419011395a7574 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 14:45:56 +0200 Subject: [PATCH 0014/1311] Switch from separate sort_mode to more general randomValueOtherThan ... for sort tests only ... --- .../search/sort/FieldSortBuilderTests.java | 5 +++-- .../search/sort/RandomSortDataGenerator.java | 14 -------------- .../search/sort/ScriptSortBuilderTests.java | 7 ++++--- .../java/org/elasticsearch/test/ESTestCase.java | 11 +++++++++++ 4 files changed, 18 insertions(+), 19 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 5ca9aef658c..f209ebefdef 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -51,7 +52,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); } if (randomBoolean()) { @@ -77,7 +78,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); break; case 3: mutated.unmappedType(RandomSortDataGenerator.randomAscii(mutated.unmappedType())); diff --git a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java index a31de756bc9..3a40e9b56c6 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java @@ -63,20 +63,6 @@ public class RandomSortDataGenerator { return nestedPath; } - public static SortMode mode(SortMode original) { - Set set = new HashSet<>(); - set.add(original); - return mode(set); - } - - public static SortMode mode(Set except) { - SortMode mode = ESTestCase.randomFrom(SortMode.values()); - while (except.contains(mode)) { - mode = ESTestCase.randomFrom(SortMode.values()); - } - return mode; - } - public static Object missing(Object original) { Object missing = null; Object otherMissing = original; diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index a1a24632f09..85ee079ffe4 100644 --- 
a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; +import org.elasticsearch.test.ESTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; @@ -52,13 +53,13 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); } else { Set exceptThis = new HashSet<>(); exceptThis.add(SortMode.SUM); exceptThis.add(SortMode.AVG); exceptThis.add(SortMode.MEDIAN); - builder.sortMode(RandomSortDataGenerator.mode(exceptThis)); + builder.sortMode(ESTestCase.randomValueOtherThanMany(exceptThis, () -> randomFrom(SortMode.values()))); } } if (randomBoolean()) { @@ -101,7 +102,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); } else { // script sort type String only allows MIN and MAX, so we only switch if (original.sortMode() == SortMode.MIN) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 8bbc95fe2ad..d8a3221c88f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -417,6 +417,17 @@ public abstract class ESTestCase extends LuceneTestCase { return randomValue; } + /** + * helper to get a random value in a certain range that's different from the input + */ + public static T randomValueOtherThanMany(Collection input, Supplier randomSupplier) { + T randomValue = null; + do { + randomValue = randomSupplier.get(); + } while (input.contains(randomValue)); + return randomValue; + } + /** * Runs the code block for 10 seconds waiting for no assertion to trip. 
*/ From 8dc0610eaa86b0b2bf5db0e6387fe98ded94bc55 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 15:07:55 +0200 Subject: [PATCH 0015/1311] Get rid of duplicated random ascii generation --- .../search/sort/FieldSortBuilderTests.java | 16 ++++++++++++---- .../search/sort/GeoDistanceSortBuilderTests.java | 10 ++++++++-- .../search/sort/RandomSortDataGenerator.java | 8 -------- .../search/sort/ScriptSortBuilderTests.java | 4 +++- 4 files changed, 23 insertions(+), 15 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index f209ebefdef..c557c714d0f 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -48,7 +48,9 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); } if (randomBoolean()) { @@ -60,7 +62,9 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); } return builder; @@ -72,7 +76,9 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 1: mutated.setNestedFilter(RandomSortDataGenerator.nestedFilter(mutated.getNestedFilter())); @@ -81,7 +87,9 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); break; case 3: - mutated.unmappedType(RandomSortDataGenerator.randomAscii(mutated.unmappedType())); + mutated.unmappedType(ESTestCase.randomValueOtherThan( + mutated.unmappedType(), + () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 4: mutated.missing(RandomSortDataGenerator.missing(mutated.missing())); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 87fd183f1ce..a43f9c54bb3 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; @@ -91,7 +92,10 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); } if (randomBoolean()) { result.coerce(! result.coerce()); @@ -176,7 +180,9 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 8: result.coerce(! 
original.coerce()); diff --git a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java index 3a40e9b56c6..86a12f7ad84 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java @@ -55,14 +55,6 @@ public class RandomSortDataGenerator { return nested; } - public static String randomAscii(String original) { - String nestedPath = ESTestCase.randomAsciiOfLengthBetween(1, 10); - while (nestedPath.equals(original)) { - nestedPath = ESTestCase.randomAsciiOfLengthBetween(1, 10); - } - return nestedPath; - } - public static Object missing(Object original) { Object missing = null; Object otherMissing = original; diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 85ee079ffe4..b14b82aa092 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -66,7 +66,9 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); } return builder; } From 0c7e58de93c33d4d64073abd583978cd82a8e451 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 15:14:15 +0200 Subject: [PATCH 0016/1311] Get rid of duplicate random sort order generation --- .../search/sort/FieldSortBuilderTests.java | 4 ++-- .../search/sort/GeoDistanceSortBuilderTests.java | 4 ++-- .../search/sort/RandomSortDataGenerator.java | 15 --------------- .../search/sort/ScoreSortBuilderTests.java | 3 ++- .../search/sort/ScriptSortBuilderTests.java | 2 +- 5 files changed, 7 insertions(+), 21 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index c557c714d0f..ebfa3bbb76f 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -40,7 +40,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(SortOrder.values()))); break; default: throw new IllegalStateException("Unsupported mutation."); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index a43f9c54bb3..723e569bdb5 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -83,7 +83,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(SortOrder.values()))); break; case 5: result.sortMode(mode(original.sortMode())); diff --git a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java index 86a12f7ad84..bad5ff83f76 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java @@ -84,19 +84,4 @@ public class RandomSortDataGenerator { } return missing; } - - /** - * return a random {@link SortOrder} settings, except the one 
provided by parameter if set - */ - public static SortOrder order(@Nullable SortOrder original) { - if (original == null) { - return ESTestCase.randomBoolean() ? SortOrder.ASC : SortOrder.DESC; - } - if (original.equals(SortOrder.ASC)) { - return SortOrder.DESC; - } else { - return SortOrder.ASC; - } - } - } diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java index 142a88c4036..222240a9ac4 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.test.ESTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; @@ -45,7 +46,7 @@ public class ScoreSortBuilderTests extends AbstractSortTestCase randomFrom(SortOrder.values()))); return result; } diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index b14b82aa092..9637df30772 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -49,7 +49,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase Date: Thu, 28 Apr 2016 15:27:36 +0200 Subject: [PATCH 0017/1311] Get rid of no longer needed missing generation method --- .../search/sort/FieldSortBuilderTests.java | 15 +++++++-- .../search/sort/RandomSortDataGenerator.java | 33 ------------------- 2 files changed, 12 insertions(+), 36 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index ebfa3bbb76f..27f87681777 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -28,6 +28,8 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Arrays; +import java.util.List; public class FieldSortBuilderTests extends AbstractSortTestCase { @@ -36,7 +38,14 @@ public class FieldSortBuilderTests extends AbstractSortTestCase missingContent = Arrays.asList( + "_last", + "_first", + ESTestCase.randomAsciiOfLength(10), ESTestCase.randomUnicodeOfCodepointLengthBetween(5, 15), + ESTestCase.randomInt()); + + + public FieldSortBuilder randomFieldSortBuilder() { String fieldName = rarely() ? 
FieldSortBuilder.DOC_FIELD_NAME : randomAsciiOfLengthBetween(1, 10); FieldSortBuilder builder = new FieldSortBuilder(fieldName); if (randomBoolean()) { @@ -44,7 +53,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(missingContent))); } if (randomBoolean()) { @@ -92,7 +101,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 4: - mutated.missing(RandomSortDataGenerator.missing(mutated.missing())); + mutated.missing(ESTestCase.randomValueOtherThan(mutated.missing(), () -> randomFrom(missingContent))); break; case 5: mutated.order(ESTestCase.randomValueOtherThan(mutated.order(), () -> randomFrom(SortOrder.values()))); diff --git a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java index bad5ff83f76..43ff2428d81 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/RandomSortDataGenerator.java @@ -19,16 +19,12 @@ package org.elasticsearch.search.sort; -import org.elasticsearch.common.Nullable; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.test.ESTestCase; -import java.util.HashSet; -import java.util.Set; - public class RandomSortDataGenerator { private RandomSortDataGenerator() { // this is a helper class only, doesn't need a constructor @@ -55,33 +51,4 @@ public class RandomSortDataGenerator { return nested; } - public static Object missing(Object original) { - Object missing = null; - Object otherMissing = original; - - while (missing == null || missing.equals(otherMissing)) { - int missingId = ESTestCase.randomIntBetween(0, 4); - switch (missingId) { - case 0: - missing = ("_last"); - break; - case 1: - missing = ("_first"); - break; - case 2: - missing = ESTestCase.randomAsciiOfLength(10); - break; - case 3: - missing = ESTestCase.randomUnicodeOfCodepointLengthBetween(5, 15); - break; - case 4: - missing = ESTestCase.randomInt(); - break; - default: - throw new IllegalStateException("Unknown missing type."); - - } - } - return missing; - } } From a03b131319dd815c826c211100db5efacf8a7cb4 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 28 Apr 2016 15:31:56 +0200 Subject: [PATCH 0018/1311] Shorten nested test query builder generation. 
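All three cleanups in this mini-series lean on the same ESTestCase helpers (randomValueOtherThan and randomFrom, both called in the diffs below). A minimal, self-contained sketch of the pattern; the re-implementation here is illustrative only, the real helpers live in org.elasticsearch.test.ESTestCase:

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.function.Supplier;

class RandomValueOtherThanSketch {
    private static final Random RANDOM = new Random();

    // Pick a random element from the list.
    static <T> T randomFrom(List<T> values) {
        return values.get(RANDOM.nextInt(values.size()));
    }

    // Keep drawing from the supplier until the result differs from 'other'.
    static <T> T randomValueOtherThan(T other, Supplier<T> supplier) {
        T value;
        do {
            value = supplier.get();
        } while (Objects.equals(value, other));
        return value;
    }

    public static void main(String[] args) {
        List<String> candidates = Arrays.asList("match_all", "ids", "term");
        // One line replaces the hand-rolled switch/while loops deleted below.
        System.out.println(randomValueOtherThan("ids", () -> randomFrom(candidates)));
    }
}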
--- .../search/sort/FieldSortBuilderTests.java | 4 +-- .../sort/GeoDistanceSortBuilderTests.java | 4 +-- ....java => NestedQueryBuilderGenerator.java} | 33 ++++++++----------- .../search/sort/ScriptSortBuilderTests.java | 4 +-- 4 files changed, 19 insertions(+), 26 deletions(-) rename core/src/test/java/org/elasticsearch/search/sort/{RandomSortDataGenerator.java => NestedQueryBuilderGenerator.java} (61%) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 27f87681777..abcab363dcc 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -67,7 +67,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 1: - mutated.setNestedFilter(RandomSortDataGenerator.nestedFilter(mutated.getNestedFilter())); + mutated.setNestedFilter(NestedQueryBuilderGenerator.nestedFilter(mutated.getNestedFilter())); break; case 2: mutated.sortMode(ESTestCase.randomValueOtherThan(mutated.sortMode(), () -> randomFrom(SortMode.values()))); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 723e569bdb5..9f1b1a325ef 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -89,7 +89,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase> builders = Arrays.asList( + new MatchAllQueryBuilder(), + new IdsQueryBuilder(), + new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomDouble())); + + public static QueryBuilder nestedFilter(QueryBuilder original) { @SuppressWarnings("rawtypes") - QueryBuilder nested = null; - while (nested == null || nested.equals(original)) { - switch (ESTestCase.randomInt(2)) { - case 0: - nested = new MatchAllQueryBuilder(); - break; - case 1: - nested = new IdsQueryBuilder(); - break; - default: - case 2: - nested = new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomDouble()); - break; - } - nested.boost((float) ESTestCase.randomDoubleBetween(0, 10, false)); - } + QueryBuilder nested = ESTestCase.randomValueOtherThan(original, () -> ESTestCase.randomFrom(builders)); + nested.boost((float) ESTestCase.randomDoubleBetween(0, 10, false)); return nested; } - } diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 9637df30772..ba9d608f610 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -63,7 +63,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase Date: Fri, 29 Apr 2016 14:40:02 +0200 Subject: [PATCH 0019/1311] Add new UnsupportedOperationException for EC2 Mock --- .../discovery/ec2/AmazonEC2Mock.java | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 881cb98e1d1..49f4f885419 100644 ---
a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -554,7 +554,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DeleteNatGatewayResult deleteNatGateway(DeleteNatGatewayRequest deleteNatGatewayRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -679,12 +679,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ModifyHostsResult modifyHosts(ModifyHostsRequest modifyHostsRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override public void modifyIdFormat(ModifyIdFormatRequest modifyIdFormatRequest) { - + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1014,7 +1014,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DisableVpcClassicLinkDnsSupportResult disableVpcClassicLinkDnsSupport(DisableVpcClassicLinkDnsSupportRequest disableVpcClassicLinkDnsSupportRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1044,7 +1044,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public RunScheduledInstancesResult runScheduledInstances(RunScheduledInstancesRequest runScheduledInstancesRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1084,7 +1084,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ModifyInstancePlacementResult modifyInstancePlacement(ModifyInstancePlacementRequest modifyInstancePlacementRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1224,7 +1224,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public PurchaseScheduledInstancesResult purchaseScheduledInstances(PurchaseScheduledInstancesRequest purchaseScheduledInstancesRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1404,7 +1404,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public ReleaseHostsResult releaseHosts(ReleaseHostsRequest releaseHostsRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1419,7 +1419,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public CreateNatGatewayResult createNatGateway(CreateNatGatewayRequest createNatGatewayRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1454,7 +1454,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public EnableVpcClassicLinkDnsSupportResult enableVpcClassicLinkDnsSupport(EnableVpcClassicLinkDnsSupportRequest enableVpcClassicLinkDnsSupportRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1574,12 +1574,12 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeScheduledInstanceAvailabilityResult describeScheduledInstanceAvailability(DescribeScheduledInstanceAvailabilityRequest describeScheduledInstanceAvailabilityRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeScheduledInstancesResult describeScheduledInstances(DescribeScheduledInstancesRequest describeScheduledInstancesRequest) { - return null; + throw new UnsupportedOperationException("Not supported in 
mock"); } @Override @@ -1624,7 +1624,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeNatGatewaysResult describeNatGateways(DescribeNatGatewaysRequest describeNatGatewaysRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1644,22 +1644,22 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeHostsResult describeHosts(DescribeHostsRequest describeHostsRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeHostsResult describeHosts() { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeIdFormatResult describeIdFormat(DescribeIdFormatRequest describeIdFormatRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeIdFormatResult describeIdFormat() { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1704,7 +1704,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public DescribeVpcClassicLinkDnsSupportResult describeVpcClassicLinkDnsSupport(DescribeVpcClassicLinkDnsSupportRequest describeVpcClassicLinkDnsSupportRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override @@ -1774,7 +1774,7 @@ public class AmazonEC2Mock implements AmazonEC2 { @Override public AllocateHostsResult allocateHosts(AllocateHostsRequest allocateHostsRequest) { - return null; + throw new UnsupportedOperationException("Not supported in mock"); } @Override From 8c77399f1c56be28f31d47cc1f50aa5b2d8ef647 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 29 Apr 2016 15:47:03 +0200 Subject: [PATCH 0020/1311] test: changed test's expectation that a BooleanQuery is returned. A NoMatchDocsQuery is returned instead now. 
--- .../index/query/ExistsQueryBuilderTests.java | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index 6bac6392e14..d46cce420e1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -24,10 +24,12 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import java.io.IOException; import java.util.Collection; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -56,9 +58,9 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase fields = context.simpleMatchToIndexNames(fieldPattern); if (getCurrentTypes().length == 0) { - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) query; - assertThat(booleanQuery.clauses().size(), equalTo(0)); + assertThat(query, instanceOf(MatchNoDocsQuery.class)); + MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; + assertThat(matchNoDocsQuery.toString(null), containsString("Missing types in \"exists\" query.")); } else { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; @@ -79,11 +81,11 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase Date: Fri, 29 Apr 2016 15:54:15 +0200 Subject: [PATCH 0021/1311] Add Azure discovery tests mocking Azure management endpoint (#18004) --- plugins/discovery-azure/build.gradle | 31 ++ .../azure/management/AzureComputeService.java | 26 +- .../management/AzureComputeServiceImpl.java | 28 +- .../azure/AzureUnicastHostsProvider.java | 21 +- .../discovery/azure/AzureDiscoveryPlugin.java | 3 + .../AzureDiscoveryClusterFormationTests.java | 285 ++++++++++++++++++ .../Ec2DiscoveryClusterFormationTests.java | 2 +- 7 files changed, 360 insertions(+), 36 deletions(-) create mode 100644 plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryClusterFormationTests.java diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 1dd2aa26f23..ceddbc2ff44 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.LoggedExec + /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -49,6 +51,35 @@ dependencies { compile 'org.codehaus.jackson:jackson-xc:1.9.2' } +// needed to be consistent with ssl host checking +String host = InetAddress.getLoopbackAddress().getHostAddress(); + +// location of keystore and files to generate it +File keystore = new File(project.buildDir, 'keystore/test-node.jks') + +// generate the keystore +task createKey(type: LoggedExec) { + doFirst { + project.delete(keystore.parentFile) + keystore.parentFile.mkdirs() + } + executable = 'keytool' + standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) + args '-genkey', + '-alias', 'test-node', + '-keystore', keystore, + '-keyalg', 'RSA', + '-keysize', '2048', + '-validity', '712', + '-dname', 'CN=' + host, + '-keypass', 'keypass', + '-storepass', 'keypass' +} + +// add keystore to test classpath: it expects it there +sourceSets.test.resources.srcDir(keystore.parentFile) +processTestResources.dependsOn(createKey) + dependencyLicenses { mapping from: /azure-.*/, to: 'azure' mapping from: /jackson-.*/, to: 'jackson' diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index acc1e76bde4..526f98025b7 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -25,6 +25,11 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; +import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider.Deployment; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.function.Function; public interface AzureComputeService { @@ -43,19 +48,30 @@ public interface AzureComputeService { public static final Setting KEYSTORE_TYPE_SETTING = new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString, Property.NodeScope, Property.Filtered); + + // so that it can overridden for tests + public static final Setting ENDPOINT_SETTING = new Setting("cloud.azure.management.endpoint", + "https://management.core.windows.net/", s -> { + try { + return new URI(s); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + }, Property.NodeScope); } final class Discovery { public static final Setting REFRESH_SETTING = Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), Property.NodeScope); - public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), AzureUnicastHostsProvider.HostType::fromString, Property.NodeScope); - - public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; - public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; - public static final String DEPLOYMENT_SLOT = "discovery.azure.deployment.slot"; + public static final Setting ENDPOINT_NAME_SETTING = new Setting<>("discovery.azure.endpoint.name", "elasticsearch", + Function.identity(), Property.NodeScope); + public static final Setting DEPLOYMENT_NAME_SETTING = 
Setting.simpleString("discovery.azure.deployment.name", + Property.NodeScope); + public static final Setting DEPLOYMENT_SLOT_SETTING = new Setting<>("discovery.azure.deployment.slot", + Deployment.PRODUCTION.name(), Deployment::fromString, Property.NodeScope); } HostedServiceGetDetailedResponse getServiceDetails(); diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java index 04b4f32ea92..0764ec99c12 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java @@ -33,8 +33,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; /** * @@ -42,10 +40,6 @@ import java.net.URISyntaxException; public class AzureComputeServiceImpl extends AbstractLifecycleComponent implements AzureComputeService { - static final class Azure { - private static final String ENDPOINT = "https://management.core.windows.net/"; - } - private final ComputeManagementClient computeManagementClient; private final String serviceName; @@ -59,18 +53,18 @@ public class AzureComputeServiceImpl extends AbstractLifecycleComponent> nodePlugins() { + return pluginList(AzureDiscoveryPlugin.class, TestPlugin.class); + } + + private static Path keyStoreFile; + + @BeforeClass + public static void setupKeyStore() throws IOException { + Path tempDir = createTempDir(); + keyStoreFile = tempDir.resolve("test-node.jks"); + try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) { + assertNotNull("can't find keystore file", stream); + Files.copy(stream, keyStoreFile); + } + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + Path resolve = logDir.resolve(Integer.toString(nodeOrdinal)); + try { + Files.createDirectory(resolve); + } catch (IOException e) { + throw new RuntimeException(e); + } + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), AzureDiscoveryPlugin.AZURE) + .put(Environment.PATH_LOGS_SETTING.getKey(), resolve) + .put(TransportSettings.PORT.getKey(), 0) + .put(Node.WRITE_PORTS_FIELD_SETTING.getKey(), "true") + .put(AzureComputeService.Management.ENDPOINT_SETTING.getKey(), "https://" + InetAddress.getLoopbackAddress().getHostAddress() + + ":" + httpsServer.getAddress().getPort()) + .put(Environment.PATH_CONF_SETTING.getKey(), keyStoreFile.getParent().toAbsolutePath()) + .put(AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), keyStoreFile.toAbsolutePath()) + .put(AzureComputeService.Discovery.HOST_TYPE_SETTING.getKey(), AzureUnicastHostsProvider.HostType.PUBLIC_IP.name()) + .put(AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey(), "keypass") + .put(AzureComputeService.Management.KEYSTORE_TYPE_SETTING.getKey(), "jks") + .put(AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), "myservice") + .put(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), "subscription") + .put(AzureComputeService.Discovery.DEPLOYMENT_NAME_SETTING.getKey(), "mydeployment") + .put(AzureComputeService.Discovery.ENDPOINT_NAME_SETTING.getKey(), "myendpoint") + 
.put(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING.getKey(), AzureUnicastHostsProvider.Deployment.PRODUCTION.name()) + .build(); + } + + /** + * Creates a mock Azure management endpoint providing the list of started nodes to the getServiceDetails API call + */ + @BeforeClass + public static void startHttpd() throws Exception { + logDir = createTempDir(); + SSLContext sslContext = getSSLContext(); + httpsServer = HttpsServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); + httpsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext)); + httpsServer.createContext("/subscription/services/hostedservices/myservice", (s) -> { + Headers headers = s.getResponseHeaders(); + headers.add("Content-Type", "text/xml; charset=UTF-8"); + XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); + xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true); + StringWriter out = new StringWriter(); + XMLStreamWriter sw; + try { + sw = xmlOutputFactory.createXMLStreamWriter(out); + sw.writeStartDocument(); + + String namespace = "http://schemas.microsoft.com/windowsazure"; + sw.setDefaultNamespace(namespace); + sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "HostedService", namespace); + { + sw.writeStartElement("Deployments"); + { + Path[] files = FileSystemUtils.files(logDir); + for (int i = 0; i < files.length; i++) { + Path resolve = files[i].resolve("transport.ports"); + if (Files.exists(resolve)) { + List addresses = Files.readAllLines(resolve); + Collections.shuffle(addresses, random()); + String address = addresses.get(0); + int indexOfLastColon = address.lastIndexOf(':'); + String host = address.substring(0, indexOfLastColon); + String port = address.substring(indexOfLastColon + 1); + + sw.writeStartElement("Deployment"); + { + sw.writeStartElement("Name"); + sw.writeCharacters("mydeployment"); + sw.writeEndElement(); + + sw.writeStartElement("DeploymentSlot"); + sw.writeCharacters(DeploymentSlot.Production.name()); + sw.writeEndElement(); + + sw.writeStartElement("Status"); + sw.writeCharacters(DeploymentStatus.Running.name()); + sw.writeEndElement(); + + sw.writeStartElement("RoleInstanceList"); + { + sw.writeStartElement("RoleInstance"); + { + sw.writeStartElement("RoleName"); + sw.writeCharacters(UUID.randomUUID().toString()); + sw.writeEndElement(); + + sw.writeStartElement("IpAddress"); + sw.writeCharacters(host); + sw.writeEndElement(); + + sw.writeStartElement("InstanceEndpoints"); + { + sw.writeStartElement("InstanceEndpoint"); + { + sw.writeStartElement("Name"); + sw.writeCharacters("myendpoint"); + sw.writeEndElement(); + + sw.writeStartElement("Vip"); + sw.writeCharacters(host); + sw.writeEndElement(); + + sw.writeStartElement("PublicPort"); + sw.writeCharacters(port); + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + } + } + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeEndDocument(); + sw.flush(); + + final byte[] responseAsBytes = out.toString().getBytes(StandardCharsets.UTF_8); + s.sendResponseHeaders(200, responseAsBytes.length); + OutputStream responseBody = s.getResponseBody(); + responseBody.write(responseAsBytes); + responseBody.close(); + } catch (XMLStreamException e) { + Loggers.getLogger(AzureDiscoveryClusterFormationTests.class).error("Failed serializing XML", e); + throw new RuntimeException(e); + } + }); + + httpsServer.start(); + } + + private static SSLContext
getSSLContext() throws Exception { + char[] passphrase = "keypass".toCharArray(); + KeyStore ks = KeyStore.getInstance("JKS"); + try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) { + assertNotNull("can't find keystore file", stream); + ks.load(stream, passphrase); + } + KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + kmf.init(ks, passphrase); + TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + tmf.init(ks); + SSLContext ssl = SSLContext.getInstance("TLS"); + ssl.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); + return ssl; + } + + @AfterClass + public static void stopHttpd() throws IOException { + for (int i = 0; i < internalCluster().size(); i++) { + // shut them all down otherwise we get spammed with connection refused exceptions + internalCluster().stopRandomDataNode(); + } + httpsServer.stop(0); + httpsServer = null; + logDir = null; + } + + public void testJoin() throws ExecutionException, InterruptedException { + // only wait for the cluster to form + assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get()); + // add one more node and wait for it to join + internalCluster().startDataOnlyNodeAsync().get(); + assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(3)).get()); + } +} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java index d458fc2c73a..6d1104ce6e2 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java @@ -248,7 +248,7 @@ public class Ec2DiscoveryClusterFormationTests extends ESIntegTestCase { logDir = null; } - public void testJoin() throws ExecutionException, InterruptedException, XMLStreamException { + public void testJoin() throws ExecutionException, InterruptedException { // only wait for the cluster to form assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get()); // add one more node and wait for it to join From f349c4f1356f9365d39edf414f7e9950e3734c46 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 29 Apr 2016 10:26:34 -0400 Subject: [PATCH 0022/1311] Return null for "same" thread pool info This commit modifies the ESThreadPoolTestCase#info helper method to return null when info for the thread pool can not be found. This really should only happen for the "same" thread pool, and so we also assert that we only get to a place where there is no info if the thread pool that info was requested for is in fact the "same" thread pool. Not returning null here and instead throwing an exception would fail tests that tried to look up info on the "same" thread pool.
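A self-contained analogue of the resulting helper (simplified types; the real method walks the ThreadPool.Info entries of the pool under test, as the one-line diff below shows):

import java.util.Arrays;
import java.util.List;

class ThreadPoolInfoLookupSketch {
    // Returns the matching entry, or null for the one pool ("same")
    // that legitimately has no info entry.
    static String info(List<String> poolNames, String name) {
        for (String candidate : poolNames) {
            if (candidate.equals(name)) {
                return candidate;
            }
        }
        // Anything other than "same" missing here would be a real bug.
        assert "same".equals(name);
        return null;
    }

    public static void main(String[] args) {
        List<String> poolNames = Arrays.asList("search", "index", "bulk");
        System.out.println(info(poolNames, "search")); // search
        System.out.println(info(poolNames, "same"));   // null, by design
    }
}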
--- .../org/elasticsearch/threadpool/ESThreadPoolTestCase.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java b/core/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java index 7fbd3ccd31b..21035811905 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ESThreadPoolTestCase.java @@ -32,7 +32,8 @@ public abstract class ESThreadPoolTestCase extends ESTestCase { return info; } } - throw new IllegalArgumentException(name); + assert "same".equals(name); + return null; } protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) { From 07c2fbf83a6786835462630f640584d1932a4a29 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 29 Apr 2016 07:58:27 -0700 Subject: [PATCH 0023/1311] Validate properties values according to database type (#17940) Fixes #17683. --- .../ingest/geoip/GeoIpProcessor.java | 64 +++++++++++++------ .../geoip/GeoIpProcessorFactoryTests.java | 41 +++++++++++- .../test/ingest_geoip/20_geoip_processor.yaml | 2 +- 3 files changed, 86 insertions(+), 21 deletions(-) diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 4b153d468ae..c524bf15a3d 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -58,6 +58,8 @@ import static org.elasticsearch.ingest.core.ConfigurationUtils.readStringPropert public final class GeoIpProcessor extends AbstractProcessor { public static final String TYPE = "geoip"; + private static final String CITY_DB_TYPE = "GeoLite2-City"; + private static final String COUNTRY_DB_TYPE = "GeoLite2-Country"; private final String field; private final String targetField; @@ -79,14 +81,14 @@ public final class GeoIpProcessor extends AbstractProcessor { Map geoData; switch (dbReader.getMetadata().getDatabaseType()) { - case "GeoLite2-City": + case CITY_DB_TYPE: try { geoData = retrieveCityGeoData(ipAddress); } catch (AddressNotFoundRuntimeException e) { geoData = Collections.emptyMap(); } break; - case "GeoLite2-Country": + case COUNTRY_DB_TYPE: try { geoData = retrieveCountryGeoData(ipAddress); } catch (AddressNotFoundRuntimeException e) { @@ -215,10 +217,11 @@ public final class GeoIpProcessor extends AbstractProcessor { } public static final class Factory extends AbstractProcessorFactory implements Closeable { - - static final Set DEFAULT_PROPERTIES = EnumSet.of( - Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION + static final Set DEFAULT_CITY_PROPERTIES = EnumSet.of( + Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, + Property.CITY_NAME, Property.LOCATION ); + static final Set DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE); private final Map databaseReaders; @@ -233,24 +236,33 @@ public final class GeoIpProcessor extends AbstractProcessor { String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb"); List propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); + DatabaseReader databaseReader = databaseReaders.get(databaseFile); + if (databaseReader == null) { + throw 
newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist"); + } + + String databaseType = databaseReader.getMetadata().getDatabaseType(); + final Set properties; if (propertyNames != null) { properties = EnumSet.noneOf(Property.class); for (String fieldName : propertyNames) { try { - properties.add(Property.parse(fieldName)); - } catch (Exception e) { - throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Property.values()) + "]"); + properties.add(Property.parseProperty(databaseType, fieldName)); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } } } else { - properties = DEFAULT_PROPERTIES; + if (CITY_DB_TYPE.equals(databaseType)) { + properties = DEFAULT_CITY_PROPERTIES; + } else if (COUNTRY_DB_TYPE.equals(databaseType)) { + properties = DEFAULT_COUNTRY_PROPERTIES; + } else { + throw newConfigurationException(TYPE, processorTag, "database_file", "Unsupported database type [" + databaseType + "]"); + } } - DatabaseReader databaseReader = databaseReaders.get(databaseFile); - if (databaseReader == null) { - throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist"); - } return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties); } @@ -279,13 +291,29 @@ public final class GeoIpProcessor extends AbstractProcessor { REGION_NAME, CITY_NAME, TIMEZONE, - LATITUDE, - LONGITUDE, LOCATION; - public static Property parse(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); + static final EnumSet ALL_CITY_PROPERTIES = EnumSet.allOf(Property.class); + static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of(Property.IP, Property.CONTINENT_NAME, + Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE); + + public static Property parseProperty(String databaseType, String value) { + Set validProperties = EnumSet.noneOf(Property.class); + if (CITY_DB_TYPE.equals(databaseType)) { + validProperties = ALL_CITY_PROPERTIES; + } else if (COUNTRY_DB_TYPE.equals(databaseType)) { + validProperties = ALL_COUNTRY_PROPERTIES; + } + + try { + Property property = valueOf(value.toUpperCase(Locale.ROOT)); + if (validProperties.contains(property) == false) { + throw new IllegalArgumentException("invalid"); + } + return property; + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("illegal property value [" + value + "]. 
valid values are " + Arrays.toString(validProperties.toArray())); + } } } - } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 794f64bf01f..0840b10b004 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -19,8 +19,10 @@ package org.elasticsearch.ingest.geoip; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Randomness; import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -79,7 +81,25 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + } + + public void testCountryBuildDefaults() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("database_file", "GeoLite2-Country.mmdb"); + + String processorTag = randomAsciiOfLength(10); + config.put(AbstractProcessorFactory.TAG_KEY, processorTag); + + GeoIpProcessor processor = factory.create(config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("geoip")); + assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); + assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); } public void testBuildTargetField() throws Exception { @@ -101,6 +121,23 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); + assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + } + + public void testBuildWithCountryDbAndCityFields() throws Exception { + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("database_file", "GeoLite2-Country.mmdb"); + EnumSet cityOnlyProperties = EnumSet.complementOf(GeoIpProcessor.Property.ALL_COUNTRY_PROPERTIES); + String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); + config.put("properties", Collections.singletonList(cityProperty)); + try { + factory.create(config); + fail("Exception expected"); + } catch (ElasticsearchParseException e) { + assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty + "]. 
valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME]")); + } } public void testBuildNonExistingDbFile() throws Exception { @@ -146,7 +183,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { factory.create(config); fail("exception expected"); } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[properties] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]")); + assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); } config = new HashMap<>(); diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml index 219497915d7..5e8a3e7c1ff 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yaml @@ -54,7 +54,7 @@ { "geoip" : { "field" : "field1", - "properties" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"] + "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"] } } ] From 7aca1389e2c57d12cb3f62cc293ef91b4817d1fe Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 25 Apr 2016 23:22:32 +0200 Subject: [PATCH 0024/1311] ingest: Add `date_index_name` processor. Closes #17814 --- .../processor/DateIndexNameProcessor.java | 156 ++++++++++++++++++ .../org/elasticsearch/node/NodeModule.java | 2 + .../processor/DateIndexNameFactoryTests.java | 99 +++++++++++ .../DateIndexNameProcessorTests.java | 77 +++++++++ docs/reference/ingest/ingest-node.asciidoc | 61 +++++++ .../test/ingest_grok/10_basic.yaml | 25 +-- .../test/ingest_attachment/10_basic.yaml | 23 +-- .../test/ingest_geoip/10_basic.yaml | 25 +-- .../ingest/90_date_index_name_processor.yaml | 29 ++++ 9 files changed, 462 insertions(+), 35 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/ingest/processor/DateIndexNameProcessor.java create mode 100644 core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameFactoryTests.java create mode 100644 core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameProcessorTests.java create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/ingest/90_date_index_name_processor.yaml diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateIndexNameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateIndexNameProcessor.java new file mode 100644 index 00000000000..2de44addaf5 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateIndexNameProcessor.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ingest.core.AbstractProcessor; +import org.elasticsearch.ingest.core.AbstractProcessorFactory; +import org.elasticsearch.ingest.core.ConfigurationUtils; +import org.elasticsearch.ingest.core.IngestDocument; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.IllformedLocaleException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Function; + +public final class DateIndexNameProcessor extends AbstractProcessor { + + public static final String TYPE = "date_index_name"; + + private final String field; + private final String indexNamePrefix; + private final String dateRounding; + private final String indexNameFormat; + private final DateTimeZone timezone; + private final List> dateFormats; + + DateIndexNameProcessor(String tag, String field, List> dateFormats, DateTimeZone timezone, + String indexNamePrefix, String dateRounding, String indexNameFormat) { + super(tag); + this.field = field; + this.timezone = timezone; + this.dateFormats = dateFormats; + this.indexNamePrefix = indexNamePrefix; + this.dateRounding = dateRounding; + this.indexNameFormat = indexNameFormat; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + String date = ingestDocument.getFieldValue(field, String.class); + + DateTime dateTime = null; + Exception lastException = null; + for (Function dateParser : dateFormats) { + try { + dateTime = dateParser.apply(date); + } catch (Exception e) { + //try the next parser and keep track of the exceptions + lastException = ExceptionsHelper.useOrSuppress(lastException, e); + } + } + + if (dateTime == null) { + throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException); + } + + DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat); + StringBuilder builder = new StringBuilder() + .append('<') + .append(indexNamePrefix) + .append('{') + .append(formatter.print(dateTime)).append("||/").append(dateRounding) + .append('{').append(indexNameFormat).append('|').append(timezone).append('}') + .append('}') + .append('>'); + String dynamicIndexName = builder.toString(); + ingestDocument.setFieldValue(IngestDocument.MetaData.INDEX.getFieldName(), dynamicIndexName); + } + + @Override + public String getType() { + return TYPE; + } + + String getField() { + return field; + } + + String getIndexNamePrefix() { + return indexNamePrefix; + } + + String getDateRounding() { + return dateRounding; + } + + String getIndexNameFormat() { + return indexNameFormat; + } + + DateTimeZone getTimezone() { + return timezone; + } + + List> getDateFormats() { + return dateFormats; + } + + public static final class Factory extends AbstractProcessorFactory { + + @Override + protected DateIndexNameProcessor doCreate(String tag, Map config) throws Exception 
{ + String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "locale"); + String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "timezone"); + DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); + Locale locale = Locale.ENGLISH; + if (localeString != null) { + try { + locale = (new Locale.Builder()).setLanguageTag(localeString).build(); + } catch (IllformedLocaleException e) { + throw new IllegalArgumentException("Invalid language tag specified: " + localeString); + } + } + List dateFormatStrings = ConfigurationUtils.readOptionalList(TYPE, tag, config, "date_formats"); + if (dateFormatStrings == null) { + dateFormatStrings = Collections.singletonList("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + } + List> dateFormats = new ArrayList<>(dateFormatStrings.size()); + for (String format : dateFormatStrings) { + DateFormat dateFormat = DateFormat.fromString(format); + dateFormats.add(dateFormat.getFunction(format, timezone, locale)); + } + + String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field"); + String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", ""); + String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding"); + String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd"); + return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat); + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java index 935b240b3d9..8565b14be8e 100644 --- a/core/src/main/java/org/elasticsearch/node/NodeModule.java +++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java @@ -28,6 +28,7 @@ import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.ingest.processor.AppendProcessor; import org.elasticsearch.ingest.processor.ConvertProcessor; import org.elasticsearch.ingest.processor.DateProcessor; +import org.elasticsearch.ingest.processor.DateIndexNameProcessor; import org.elasticsearch.ingest.processor.FailProcessor; import org.elasticsearch.ingest.processor.ForEachProcessor; import org.elasticsearch.ingest.processor.GsubProcessor; @@ -76,6 +77,7 @@ public class NodeModule extends AbstractModule { registerProcessor(GsubProcessor.TYPE, (templateService, registry) -> new GsubProcessor.Factory()); registerProcessor(FailProcessor.TYPE, (templateService, registry) -> new FailProcessor.Factory(templateService)); registerProcessor(ForEachProcessor.TYPE, (templateService, registry) -> new ForEachProcessor.Factory(registry)); + registerProcessor(DateIndexNameProcessor.TYPE, (templateService, registry) -> new DateIndexNameProcessor.Factory()); } @Override diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameFactoryTests.java new file mode 100644 index 00000000000..5bb674e950a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameFactoryTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import org.joda.time.DateTimeZone; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +public class DateIndexNameFactoryTests extends ESTestCase { + + public void testDefaults() throws Exception { + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("date_rounding", "y"); + + DateIndexNameProcessor processor = factory.create(config); + assertThat(processor.getDateFormats().size(), Matchers.equalTo(1)); + assertThat(processor.getField(), Matchers.equalTo("_field")); + assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("")); + assertThat(processor.getDateRounding(), Matchers.equalTo("y")); + assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd")); + assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC)); + } + + public void testSpecifyOptionalSettings() throws Exception { + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("index_name_prefix", "_prefix"); + config.put("date_rounding", "y"); + config.put("date_formats", Arrays.asList("UNIX", "UNIX_MS")); + + DateIndexNameProcessor processor = factory.create(config); + assertThat(processor.getDateFormats().size(), Matchers.equalTo(2)); + + config = new HashMap<>(); + config.put("field", "_field"); + config.put("index_name_prefix", "_prefix"); + config.put("date_rounding", "y"); + config.put("index_name_format", "yyyyMMdd"); + + processor = factory.create(config); + assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd")); + + config = new HashMap<>(); + config.put("field", "_field"); + config.put("index_name_prefix", "_prefix"); + config.put("date_rounding", "y"); + config.put("timezone", "+02:00"); + + processor = factory.create(config); + assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.forOffsetHours(2))); + + config = new HashMap<>(); + config.put("field", "_field"); + config.put("index_name_prefix", "_prefix"); + config.put("date_rounding", "y"); + + processor = factory.create(config); + assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix")); + } + + public void testRequiredFields() throws Exception { + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + Map config = new HashMap<>(); + config.put("date_rounding", "y"); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); + + config.clear(); + config.put("field", "_field"); + e = 
expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing")); + } + +} diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameProcessorTests.java new file mode 100644 index 00000000000..2dd5934387c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateIndexNameProcessorTests.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.ingest.processor; + +import org.elasticsearch.ingest.core.IngestDocument; +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.util.Collections; +import java.util.Locale; +import java.util.function.Function; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class DateIndexNameProcessorTests extends ESTestCase { + + public void testJodaPattern() throws Exception { + Function function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT); + DateIndexNameProcessor processor = new DateIndexNameProcessor( + "_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, + "events-", "y", "yyyyMMdd" + ); + + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z")); + processor.execute(document); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20160425||/y{yyyyMMdd|UTC}}>")); + } + + public void testTAI64N()throws Exception { + Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null); + DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + Collections.singletonMap("_field", (randomBoolean() ?
"@" : "") + "4000000050d506482dbdf024")); + dateProcessor.execute(document); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + } + + public void testUnixMs()throws Exception { + Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null); + DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + Collections.singletonMap("_field", "1000500")); + dateProcessor.execute(document); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + } + + public void testUnix()throws Exception { + Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null); + DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + Collections.singletonMap("_field", "1000.5")); + dateProcessor.execute(document); + assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); + } + +} diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 93dabbd9122..7831ee86290 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -739,6 +739,67 @@ Here is an example that adds the parsed date to the `timestamp` field based on t } -------------------------------------------------- +[[date-index-name-processor]] +=== Date Index Name Processor + +The purpose of this processor is to point documents to the right time based index based +on a date or timestamp field in a document by using the <>. + +The processor sets the `_index` meta field with a date math index name expression based on the provided index name +prefix, a date or timestamp field in the documents being processed and the provided date rounding. + +First this processor fetches the date or timestamp from a field in the document being processed. Optionally +date formatting can be configured on how the field's value should be parsed into a date. Then this date, +the provided index name prefix and the provided date rounding get formatted into a date math index name expression. +Also here optionally date formatting can be specified on how the date should be formatted into a date math index name +expression. + +An example pipeline that points documents to a monthly index that starts with a `myindex-` prefix based on a +date in the `date1` field: + +[source,js] +-------------------------------------------------- +PUT _ingest/pipeline/1 +{ + "processors" : [ + { + "date_index_name" : { + "field" : "date1", + "index_name_prefix" : "myindex-", + "date_rounding" : "m" + } + } + ] +} +-------------------------------------------------- + +Using that pipeline for an index request: + +[source,js] +-------------------------------------------------- +PUT /myindex/type/1?pipeline=1 +{ + "date1" : "2016-04-25T12:02:01.789Z" +} +-------------------------------------------------- + +The above request will not index this document into the `myindex` index, but into the `myindex-2016-04-01` index. +This is because the date is being rounded by month. 
+
+[[date-index-name-options]]
+.Date index name options
+[options="header"]
+|======
+| Name                | Required  | Default                     | Description
+| `field`             | yes       | -                           | The field to get the date or timestamp from.
+| `index_name_prefix` | no        | -                           | A prefix of the index name to be prepended before the printed date.
+| `date_rounding`     | yes       | -                           | How to round the date when formatting the date into the index name. Valid values are: `y` (year), `M` (month), `w` (week), `d` (day), `h` (hour), `m` (minute) and `s` (second).
+| `date_formats`      | no        | yyyy-MM-dd'T'HH:mm:ss.SSSZ  | An array of the expected date formats for parsing dates / timestamps in the document being preprocessed. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N.
+| `timezone`          | no        | UTC                         | The timezone to use when parsing the date and when the date math index name expression is resolved into a concrete index name.
+| `locale`            | no        | ENGLISH                     | The locale to use when parsing the date from the document being preprocessed, relevant when parsing month names or week days.
+| `index_name_format` | no        | yyyy-MM-dd                  | The format to be used when printing the parsed date into the index name. A valid Joda pattern is expected here.
+|======
+
 [[fail-processor]]
 === Fail Processor
 Raises an exception. This is useful for when
diff --git a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml
index ebb310ecf7a..f61027c3de3 100644
--- a/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml
+++ b/modules/ingest-grok/src/test/resources/rest-api-spec/test/ingest_grok/10_basic.yaml
@@ -12,15 +12,16 @@
   - match: { nodes.$master.ingest.processors.0.type: append }
   - match: { nodes.$master.ingest.processors.1.type: convert }
   - match: { nodes.$master.ingest.processors.2.type: date }
-  - match: { nodes.$master.ingest.processors.3.type: fail }
-  - match: { nodes.$master.ingest.processors.4.type: foreach }
-  - match: { nodes.$master.ingest.processors.5.type: grok }
-  - match: { nodes.$master.ingest.processors.6.type: gsub }
-  - match: { nodes.$master.ingest.processors.7.type: join }
-  - match: { nodes.$master.ingest.processors.8.type: lowercase }
-  - match: { nodes.$master.ingest.processors.9.type: remove }
-  - match: { nodes.$master.ingest.processors.10.type: rename }
-  - match: { nodes.$master.ingest.processors.11.type: set }
-  - match: { nodes.$master.ingest.processors.12.type: split }
-  - match: { nodes.$master.ingest.processors.13.type: trim }
-  - match: { nodes.$master.ingest.processors.14.type: uppercase }
+  - match: { nodes.$master.ingest.processors.3.type: date_index_name }
+  - match: { nodes.$master.ingest.processors.4.type: fail }
+  - match: { nodes.$master.ingest.processors.5.type: foreach }
+  - match: { nodes.$master.ingest.processors.6.type: grok }
+  - match: { nodes.$master.ingest.processors.7.type: gsub }
+  - match: { nodes.$master.ingest.processors.8.type: join }
+  - match: { nodes.$master.ingest.processors.9.type: lowercase }
+  - match: { nodes.$master.ingest.processors.10.type: remove }
+  - match: { nodes.$master.ingest.processors.11.type: rename }
+  - match: { nodes.$master.ingest.processors.12.type: set }
+  - match: { nodes.$master.ingest.processors.13.type: split }
+  - match: { nodes.$master.ingest.processors.14.type: trim }
+  - match: { nodes.$master.ingest.processors.15.type: uppercase }
diff --git 
a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml index 7c789b9c2ca..697ad558bb7 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yaml @@ -12,15 +12,16 @@ - match: { nodes.$master.ingest.processors.1.type: attachment } - match: { nodes.$master.ingest.processors.2.type: convert } - match: { nodes.$master.ingest.processors.3.type: date } - - match: { nodes.$master.ingest.processors.4.type: fail } - - match: { nodes.$master.ingest.processors.5.type: foreach } - - match: { nodes.$master.ingest.processors.6.type: gsub } - - match: { nodes.$master.ingest.processors.7.type: join } - - match: { nodes.$master.ingest.processors.8.type: lowercase } - - match: { nodes.$master.ingest.processors.9.type: remove } - - match: { nodes.$master.ingest.processors.10.type: rename } - - match: { nodes.$master.ingest.processors.11.type: set } - - match: { nodes.$master.ingest.processors.12.type: split } - - match: { nodes.$master.ingest.processors.13.type: trim } - - match: { nodes.$master.ingest.processors.14.type: uppercase } + - match: { nodes.$master.ingest.processors.4.type: date_index_name } + - match: { nodes.$master.ingest.processors.5.type: fail } + - match: { nodes.$master.ingest.processors.6.type: foreach } + - match: { nodes.$master.ingest.processors.7.type: gsub } + - match: { nodes.$master.ingest.processors.8.type: join } + - match: { nodes.$master.ingest.processors.9.type: lowercase } + - match: { nodes.$master.ingest.processors.10.type: remove } + - match: { nodes.$master.ingest.processors.11.type: rename } + - match: { nodes.$master.ingest.processors.12.type: set } + - match: { nodes.$master.ingest.processors.13.type: split } + - match: { nodes.$master.ingest.processors.14.type: trim } + - match: { nodes.$master.ingest.processors.15.type: uppercase } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml index cf86f4c7f4c..e405bbb5a6c 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yaml @@ -11,15 +11,16 @@ - match: { nodes.$master.ingest.processors.0.type: append } - match: { nodes.$master.ingest.processors.1.type: convert } - match: { nodes.$master.ingest.processors.2.type: date } - - match: { nodes.$master.ingest.processors.3.type: fail } - - match: { nodes.$master.ingest.processors.4.type: foreach } - - match: { nodes.$master.ingest.processors.5.type: geoip } - - match: { nodes.$master.ingest.processors.6.type: gsub } - - match: { nodes.$master.ingest.processors.7.type: join } - - match: { nodes.$master.ingest.processors.8.type: lowercase } - - match: { nodes.$master.ingest.processors.9.type: remove } - - match: { nodes.$master.ingest.processors.10.type: rename } - - match: { nodes.$master.ingest.processors.11.type: set } - - match: { nodes.$master.ingest.processors.12.type: split } - - match: { nodes.$master.ingest.processors.13.type: trim } - - match: { nodes.$master.ingest.processors.14.type: uppercase } + - match: { nodes.$master.ingest.processors.3.type: date_index_name } + - match: { nodes.$master.ingest.processors.4.type: 
fail } + - match: { nodes.$master.ingest.processors.5.type: foreach } + - match: { nodes.$master.ingest.processors.6.type: geoip } + - match: { nodes.$master.ingest.processors.7.type: gsub } + - match: { nodes.$master.ingest.processors.8.type: join } + - match: { nodes.$master.ingest.processors.9.type: lowercase } + - match: { nodes.$master.ingest.processors.10.type: remove } + - match: { nodes.$master.ingest.processors.11.type: rename } + - match: { nodes.$master.ingest.processors.12.type: set } + - match: { nodes.$master.ingest.processors.13.type: split } + - match: { nodes.$master.ingest.processors.14.type: trim } + - match: { nodes.$master.ingest.processors.15.type: uppercase } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/90_date_index_name_processor.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/90_date_index_name_processor.yaml new file mode 100644 index 00000000000..c99c1025c10 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/90_date_index_name_processor.yaml @@ -0,0 +1,29 @@ +--- +"Test date index name processor with defaults": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "date_index_name" : { + "field" : "date", + "index_name_prefix": "events-", + "date_rounding": "d" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: events + type: event + id: 1 + pipeline: "1" + body: { + date: "2016-04-22T16:32:14.968Z" + } + - match: { _index: "events-2016-04-22"} From a31b94e23706a44e1ece9c270021787fa255afd8 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 28 Apr 2016 23:50:49 -0400 Subject: [PATCH 0025/1311] When checking if an index tombstone can be applied, use both the index name and uuid because the cluster state may contain an active index of the same name (but different uuid). 
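In other words, an index is identified by the (name, uuid) pair. Below is a minimal sketch of the difference between the old name-only check and the new exact check, with illustrative values; this is not the patch's code:

[source,java]
----
import org.elasticsearch.index.Index;

// Illustrative sketch: the live index and a tombstoned, earlier incarnation
// share a name but differ in UUID.
public class TombstoneCheckSketch {
    public static void main(String[] args) {
        Index active = new Index("test", "uuid-2");      // currently in the cluster state
        Index tombstoned = new Index("test", "uuid-1");  // recorded in the index graveyard

        // Name-only comparison (old behavior) wrongly says the tombstoned index still exists.
        System.out.println(active.getName().equals(tombstoned.getName())); // true

        // Name + uuid comparison (new behavior) treats them as distinct indices,
        // so the tombstone for uuid-1 can safely be applied.
        System.out.println(active.equals(tombstoned)); // false
    }
}
----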
Closes #18058
Closes #18054
---
 .../elasticsearch/indices/IndicesService.java |  4 +--
 .../indices/IndicesServiceTests.java          | 32 +++++++++++++++++++
 2 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index 48973bca5bd..61aaf551610 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -681,8 +681,8 @@ public class IndicesService extends AbstractLifecycleComponent i
      */
     @Nullable
     public IndexMetaData verifyIndexIsDeleted(final Index index, final ClusterState clusterState) {
-        // this method should only be called when we know the index is not part of the cluster state
-        if (clusterState.metaData().hasIndex(index.getName())) {
+        // this method should only be called when we know the index (name + uuid) is not part of the cluster state
+        if (clusterState.metaData().index(index) != null) {
             throw new IllegalStateException("Cannot delete index [" + index + "], it is still part of the cluster state.");
         }
         if (nodeEnv.hasNodeFile() && FileSystemUtils.exists(nodeEnv.indexPaths(index))) {
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
index 02c415c247a..76f7a30e078 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
@@ -21,8 +21,10 @@ package org.elasticsearch.indices;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.AliasAction;
+import org.elasticsearch.cluster.metadata.IndexGraveyard;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -283,6 +285,36 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
         indicesService.deleteIndex(test.index(), "finished with test");
     }
 
+    /**
+     * This test checks an edge case: suppose a node had an index (let's call it A with UUID 1), then
+     * deleted it (so a tombstone entry for A exists in the cluster state), then created a new
+     * index A with UUID 2, and then shut down. When the node comes back online, it will look at the
+     * tombstones for deletions, and it should proceed with deleting A with UUID 1 without throwing
+     * any errors about the index still existing in the cluster state. This is a case of ensuring
+     * that tombstones that have the same name as current valid indices don't cause confusion by
+     * trying to delete an index that exists.
+ * See https://github.com/elastic/elasticsearch/issues/18054 + */ + public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception { + final String indexName = "test"; + final Index index = new Index(indexName, UUIDs.randomBase64UUID()); + final IndicesService indicesService = getIndicesService(); + final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) + .build(); + final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName()) + .settings(idxSettings) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + final Index tombstonedIndex = new Index(indexName, UUIDs.randomBase64UUID()); + final IndexGraveyard graveyard = IndexGraveyard.builder().addTombstone(tombstonedIndex).build(); + final MetaData metaData = MetaData.builder().put(indexMetaData, true).indexGraveyard(graveyard).build(); + final ClusterState clusterState = new ClusterState.Builder(new ClusterName("testCluster")).metaData(metaData).build(); + // if all goes well, this won't throw an exception, otherwise, it will throw an IllegalStateException + indicesService.verifyIndexIsDeleted(tombstonedIndex, clusterState); + } + private static class DanglingListener implements LocalAllocateDangledIndices.Listener { final CountDownLatch latch = new CountDownLatch(1); From e322903f2c849e52abfed07a45a96de4e8f67e26 Mon Sep 17 00:00:00 2001 From: Robin Joseph Date: Fri, 29 Apr 2016 09:02:59 -0700 Subject: [PATCH 0026/1311] Fix typo in include-in-all.asciidoc (#18055) --- docs/reference/mapping/params/include-in-all.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/mapping/params/include-in-all.asciidoc b/docs/reference/mapping/params/include-in-all.asciidoc index 76a0d14527c..18c454ae70c 100644 --- a/docs/reference/mapping/params/include-in-all.asciidoc +++ b/docs/reference/mapping/params/include-in-all.asciidoc @@ -30,7 +30,7 @@ PUT my_index -------------------------------- // AUTOSENSE -<1> The `title` and `content` fields with be included in the `_all` field. +<1> The `title` and `content` fields will be included in the `_all` field. <2> The `date` field will not be included in the `_all` field. TIP: The `include_in_all` setting is allowed to have different settings for From b2ce2f5afab8cb5d7c8125fb2e933fd8c1138d82 Mon Sep 17 00:00:00 2001 From: Josef Salyer Date: Fri, 29 Apr 2016 12:07:23 -0400 Subject: [PATCH 0027/1311] Fixed the documentation formatting. (#17990) The source code example for the initial example was missing the correct JSON object formatting and syntax. That has been fixed with my change. --- docs/reference/indices/create-index.asciidoc | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index 11216fa4c01..e225b2f15e8 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -14,13 +14,14 @@ associated with it. 
[source,js]
--------------------------------------------------
-$ curl -XPUT 'http://localhost:9200/twitter/'
-
-$ curl -XPUT 'http://localhost:9200/twitter/' -d '
-index :
-    number_of_shards : 3 <1>
-    number_of_replicas : 2 <2>
-'
+$ curl -XPUT 'http://localhost:9200/twitter/' -d '{
+    "settings" : {
+        "index" : {
+            "number_of_shards" : 3, <1>
+            "number_of_replicas" : 2 <2>
+        }
+    }
+}'
--------------------------------------------------
<1> Default for `number_of_shards` is 5
<2> Default for `number_of_replicas` is 1 (ie one replica for each primary shard)

From 262a814c8dcf984b96db2b945d9470f5196856bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?=
Date: Fri, 29 Apr 2016 19:29:25 +0200
Subject: [PATCH 0028/1311] Tests: Fix TermsQueryBuilderTests expectations when
 lookup returns no terms

When the terms lookup (mocked in this case) doesn't return any terms, the
query used to rewrite to an empty boolean query. Now it rewrites to a
MatchNoDocsQuery. This changes the test expectation accordingly.

Closes #18071
---
 .../index/query/TermsQueryBuilderTests.java | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
index baf7a1e528c..a16f4e2f2ea 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
@@ -96,16 +96,22 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase terms; if (queryBuilder.termsLookup() != null) {
@@ -113,7 +119,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase booleanTerms = new ArrayList<>(); for (BooleanClause booleanClause : booleanQuery) {

From c82e5648865d2d732fd4172c6a42b4176b63f93e Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 29 Apr 2016 16:15:45 -0400
Subject: [PATCH 0029/1311] Remove racy but unnecessary assertion

This commit removes a racy but unnecessary assertion in the scaling thread
pool idle test. Namely, the main test thread can reach the removed assertion
before the last few threads in the thread pool have completed their tasks and
caused the completed tasks count on the underlying executor to be updated. But
this assertion is unnecessary. The main test thread already waits on a latch
that is only decremented immediately before a task completes. This ensures
that it was in fact the case that every submitted task was executed.
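To see the race, consider this minimal, illustrative sketch (not the test's actual code): each task counts the latch down just before it finishes, so the executor's completed-task counter can lag behind even after the latch opens.

[source,java]
----
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

// Minimal sketch of the race: tasks count the latch down immediately before
// completing, so awaiting the latch does not guarantee the executor's
// completed-task counter has caught up.
public class CompletedCountRace {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(8);
        CountDownLatch latch = new CountDownLatch(128);
        for (int i = 0; i < 128; i++) {
            executor.execute(latch::countDown); // countDown runs before completion is recorded
        }
        latch.await(); // every task has signaled, but the last few may still be finishing
        System.out.println(executor.getCompletedTaskCount()); // may transiently print < 128
        executor.shutdown();
    }
}
----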
Closes #18072 --- .../org/elasticsearch/threadpool/ScalingThreadPoolTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java index d27b43eeb8b..16c0a89e6c4 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java @@ -27,6 +27,7 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; +import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; @@ -193,7 +194,6 @@ public class ScalingThreadPoolTests extends ESThreadPoolTestCase { } catch (InterruptedException e) { throw new RuntimeException(e); } - assertThat(stats(threadPool, threadPoolName).getCompleted(), equalTo(128L)); })); } From 0b415806564585b2605a692d6e978fffcf0052aa Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 29 Apr 2016 16:21:59 -0400 Subject: [PATCH 0030/1311] Remove unused import from ScalingThreadPoolTests --- .../org/elasticsearch/threadpool/ScalingThreadPoolTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java index 16c0a89e6c4..94d6d075589 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java @@ -27,7 +27,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; From 9162fd27a01fd6c82e85fd03f2b610ef32bab4f0 Mon Sep 17 00:00:00 2001 From: Todd Dicken Date: Mon, 2 May 2016 06:06:07 -0500 Subject: [PATCH 0031/1311] Change qoutes to quotes (#18078) Updated misspelling in documentation --- docs/reference/setup/install/windows.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index 8fd8a19ce71..3e297a9b487 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -53,7 +53,7 @@ name, as follows: ./bin/elasticsearch -E es.cluster.name=my_cluster -E es.node.name=node_1 -------------------------------------------- -NOTE: Values that contain spaces must be surrounded with qoutes. For instance `-E es.path.logs="C:\My Logs\logs"`. +NOTE: Values that contain spaces must be surrounded with quotes. For instance `-E es.path.logs="C:\My Logs\logs"`. 
TIP: Typically, any cluster-wide settings (like `cluster.name`) should be added to the `elasticsearch.yml` config file, while any node-specific settings From 7c8397d99b624a77be29995dd0d1534ae16d78ee Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Mon, 2 May 2016 13:47:14 +0200 Subject: [PATCH 0032/1311] Update keyword.asciidoc `ignore_above` doesn't apply to analyzed `text` fields --- docs/reference/mapping/types/keyword.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/mapping/types/keyword.asciidoc b/docs/reference/mapping/types/keyword.asciidoc index 8de965eea70..48fbf3cd75b 100644 --- a/docs/reference/mapping/types/keyword.asciidoc +++ b/docs/reference/mapping/types/keyword.asciidoc @@ -62,7 +62,7 @@ The following parameters are accepted by `keyword` fields: <>:: - Do not index or analyze any string longer than this value. Defaults to + Do not index any string longer than this value. Defaults to `2147483647` so that all values would be accepted. <>:: From 4cf5385e4d4df9e652f5951f5ac0266b3467b926 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Mon, 2 May 2016 13:50:00 +0200 Subject: [PATCH 0033/1311] Use plain random values for generating *SortBuilders --- .../search/sort/FieldSortBuilderTests.java | 28 +++++++++---------- .../sort/GeoDistanceSortBuilderTests.java | 6 ++-- .../sort/NestedQueryBuilderGenerator.java | 7 ++--- .../search/sort/ScriptSortBuilderTests.java | 11 ++++---- 4 files changed, 25 insertions(+), 27 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index abcab363dcc..cab40588777 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -53,27 +53,23 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomFrom(missingContent))); + builder.missing(randomFrom(missingContent)); } if (randomBoolean()) { - builder.unmappedType(ESTestCase.randomValueOtherThan( - builder.unmappedType(), - () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); + builder.unmappedType(ESTestCase.randomAsciiOfLengthBetween(1, 10)); } if (randomBoolean()) { - builder.sortMode(ESTestCase.randomValueOtherThan(builder.sortMode(), () -> randomFrom(SortMode.values()))); + builder.sortMode(randomFrom(SortMode.values())); } if (randomBoolean()) { - builder.setNestedFilter(NestedQueryBuilderGenerator.nestedFilter(builder.getNestedFilter())); + builder.setNestedFilter(NestedQueryBuilderGenerator.randomNestedFilter()); } if (randomBoolean()) { - builder.setNestedPath(ESTestCase.randomValueOtherThan( - builder.getNestedPath(), - () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); + builder.setNestedPath(ESTestCase.randomAsciiOfLengthBetween(1, 10)); } return builder; @@ -86,25 +82,27 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 1: - mutated.setNestedFilter(NestedQueryBuilderGenerator.nestedFilter(mutated.getNestedFilter())); + mutated.setNestedFilter(ESTestCase.randomValueOtherThan( + original.getNestedFilter(), + () -> NestedQueryBuilderGenerator.randomNestedFilter())); break; case 2: - mutated.sortMode(ESTestCase.randomValueOtherThan(mutated.sortMode(), () -> randomFrom(SortMode.values()))); + mutated.sortMode(ESTestCase.randomValueOtherThan(original.sortMode(), () -> randomFrom(SortMode.values()))); break; case 3: 
mutated.unmappedType(ESTestCase.randomValueOtherThan( - mutated.unmappedType(), + original.unmappedType(), () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); break; case 4: - mutated.missing(ESTestCase.randomValueOtherThan(mutated.missing(), () -> randomFrom(missingContent))); + mutated.missing(ESTestCase.randomValueOtherThan(original.missing(), () -> randomFrom(missingContent))); break; case 5: - mutated.order(ESTestCase.randomValueOtherThan(mutated.order(), () -> randomFrom(SortOrder.values()))); + mutated.order(ESTestCase.randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); break; default: throw new IllegalStateException("Unsupported mutation."); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 9f1b1a325ef..3da59f79de6 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -89,7 +89,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase NestedQueryBuilderGenerator.randomNestedFilter())); break; case 7: result.setNestedPath(ESTestCase.randomValueOtherThan( diff --git a/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java index 7e6f88cffd5..de3318036e8 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java +++ b/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java @@ -38,10 +38,7 @@ public class NestedQueryBuilderGenerator { new IdsQueryBuilder(), new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomDouble())); - public static QueryBuilder nestedFilter(QueryBuilder original) { - @SuppressWarnings("rawtypes") - QueryBuilder nested = ESTestCase.randomValueOtherThan(original, () -> ESTestCase.randomFrom(builders)); - nested.boost((float) ESTestCase.randomDoubleBetween(0, 10, false)); - return nested; + public static QueryBuilder randomNestedFilter() { + return ESTestCase.randomFrom(builders).boost(ESTestCase.randomFloat()); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index ba9d608f610..1d7247ba1e4 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; @@ -63,12 +64,10 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); + builder.setNestedPath(ESTestCase.randomAsciiOfLengthBetween(1, 10)); } return builder; } @@ -115,7 +114,9 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase NestedQueryBuilderGenerator.randomNestedFilter())); break; case 3: result.setNestedPath(original.getNestedPath() + "_some_suffix"); From 
9fee8c76af39e471926a30685722cba7533f7a73 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Mon, 2 May 2016 14:21:59 +0200 Subject: [PATCH 0034/1311] Added release notes for 5.0.0-alpha2 --- docs/reference/release-notes.asciidoc | 2 + .../release-notes/5.0.0-alpha2.asciidoc | 255 ++++++++++++++++++ 2 files changed, 257 insertions(+) create mode 100644 docs/reference/release-notes/5.0.0-alpha2.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index f9391cece06..76fe55d3d34 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -5,9 +5,11 @@ -- This section summarizes the changes in each release. +* <> * <> * <> -- +include::release-notes/5.0.0-alpha2.asciidoc[] include::release-notes/5.0.0-alpha1.asciidoc[] include::release-notes/5.0.0-alpha1-2x.asciidoc[] diff --git a/docs/reference/release-notes/5.0.0-alpha2.asciidoc b/docs/reference/release-notes/5.0.0-alpha2.asciidoc new file mode 100644 index 00000000000..ad6c62653f0 --- /dev/null +++ b/docs/reference/release-notes/5.0.0-alpha2.asciidoc @@ -0,0 +1,255 @@ +[[release-notes-5.0.0-alpha2]] +== 5.0.0-alpha2 Release Notes + +Also see <>. + +[[breaking-5.0.0-alpha2]] +[float] +=== Breaking changes + +Analysis:: +* Analyze API : Rename filters/token_filters/char_filter in Analyze API in master {pull}17843[#17843] (issue: {issue}15189[#15189]) + +Cluster:: +* Remove validation errors from cluster health response {pull}17773[#17773] (issue: {issue}16979[#16979]) + +Indexed Scripts/Templates:: +* Store indexed scripts in the cluster state instead of the `.scripts` index {pull}17650[#17650] (issue: {issue}16651[#16651]) + +Packaging:: +* Add JVM options configuration file {pull}17675[#17675] (issue: {issue}17121[#17121]) + +Percolator:: +* Remove `.percolator` type in favour of `percolator` field type {pull}17560[#17560] + +REST:: +* Remove camelCase support {pull}17933[#17933] (issue: {issue}8988[#8988]) +* Remove 'case' parameter from rest apis {pull}17774[#17774] (issue: {issue}8988[#8988]) +* Disallow unquoted field names {pull}15351[#15351] (issue: {issue}9800[#9800]) + +Settings:: +* Remove `action.get.realtime` setting {pull}17857[#17857] (issue: {issue}12543[#12543]) +* Remove Settings.settingsBuilder. {pull}17619[#17619] + + + +[[deprecation-5.0.0-alpha2]] +[float] +=== Deprecations + +Query DSL:: +* Deprecate Indices query {pull}17710[#17710] (issue: {issue}12017[#12017]) +* Deprecate mlt, in and geo_bbox query name shortcuts {pull}17507[#17507] + +Query Refactoring:: +* Splits `phrase` and `phrase_prefix` in match query into `MatchPhraseQueryBuilder` and `MatchPhrasePrefixQueryBuilder` {pull}17508[#17508] + + + +[[feature-5.0.0-alpha2]] +[float] +=== New features + +Analysis:: +* Add `fingerprint` token filter and `fingerprint` analyzer {pull}17873[#17873] (issue: {issue}13325[#13325]) + +Plugin Analysis ICU:: +* Adding support for customizing the rule file in ICU tokenizer {pull}13651[#13651] (issue: {issue}13146[#13146]) + + + +[[enhancement-5.0.0-alpha2]] +[float] +=== Enhancements + +CAT API:: +* Add _cat/tasks {pull}17551[#17551] +* Cat health supports ts=0 option {pull}13508[#13508] (issue: {issue}10109[#10109]) + +Cache:: +* Allow the query cache to be disabled. 
{pull}16268[#16268] (issue: {issue}15802[#15802]) + +Cluster:: +* Adds tombstones to cluster state for index deletions {pull}17265[#17265] (issues: {issue}16358[#16358], {issue}17435[#17435]) +* Enable acked indexing {pull}17038[#17038] (issue: {issue}7572[#7572]) + +Core:: +* Kill thread local leak {pull}17921[#17921] (issues: {issue}283[#283], {issue}630[#630]) +* Add heap size bootstrap check {pull}17728[#17728] (issue: {issue}17490[#17490]) +* Remove hostname from NetworkAddress.format {pull}17601[#17601] (issue: {issue}17604[#17604]) +* Bootstrapping bootstrap checks {pull}17595[#17595] (issues: {issue}17474[#17474], {issue}17570[#17570]) +* Add max map count check {pull}16944[#16944] + +Geo:: +* Enhanced lat/long error handling {pull}16833[#16833] (issue: {issue}16137[#16137]) + +Index APIs:: +* Fail hot_threads in a better way if unsupported by JDK {pull}15909[#15909] + +Ingest:: +* Streamline option naming for several processors {pull}17892[#17892] (issue: {issue}17835[#17835]) + +Internal:: +* Makes Script type writeable {pull}17908[#17908] (issue: {issue}17753[#17753]) +* FiltersAggregatorBuilder: Don't create new context for inner parsing {pull}17851[#17851] +* Clean up serialization on some stats {pull}17832[#17832] (issue: {issue}17085[#17085]) +* Normalize registration for SignificanceHeuristics {pull}17830[#17830] (issue: {issue}17085[#17085]) +* Make (read|write)NamedWriteable public {pull}17829[#17829] (issue: {issue}17682[#17682]) +* Use try-with-resource when creating new parser instances where possible {pull}17822[#17822] +* Don't pass XContentParser to ParseFieldRegistry#lookup {pull}17794[#17794] +* Internal: Remove threadlocal from document parser {pull}17764[#17764] +* Cut range aggregations to registerAggregation {pull}17757[#17757] (issue: {issue}17085[#17085]) +* Remove ParseFieldMatcher from AbstractXContentParser {pull}17756[#17756] (issue: {issue}17417[#17417]) +* Remove parser argument from methods where we already pass in a parse context {pull}17738[#17738] +* Switch SearchAfterBuilder to writeGenericValue {pull}17735[#17735] (issue: {issue}17085[#17085]) +* Remove StreamableReader {pull}17729[#17729] (issue: {issue}17085[#17085]) +* Cleanup nested, has_child & has_parent query builders for inner hits construction {pull}17719[#17719] (issue: {issue}11118[#11118]) +* Make AllocationCommands NamedWriteables {pull}17661[#17661] +* Isolate StreamableReader {pull}17656[#17656] (issue: {issue}17085[#17085]) +* Create registration methods for aggregations similar to those for queries {pull}17653[#17653] (issues: {issue}17085[#17085], {issue}17389[#17389]) +* Turn RestChannel into an interface {pull}17643[#17643] (issue: {issue}17133[#17133]) +* Remove PROTOTYPEs from QueryBuilders {pull}17632[#17632] (issue: {issue}17085[#17085]) +* Remove registerQueryParser {pull}17608[#17608] +* ParseField#getAllNamesIncludedDeprecated to not return duplicate names {pull}17504[#17504] +* Rework a query parser and improve registration {pull}17458[#17458] +* Clean up QueryParseContext and don't hold it inside QueryRewrite/ShardContext {pull}17417[#17417] + +Mapping:: +* Automatically upgrade analyzed strings with an analyzer to `text`. {pull}17861[#17861] +* Support dots in field names when mapping already exists {pull}17759[#17759] (issue: {issue}15951[#15951]) +* Use the new points API to index numeric fields. 
{pull}17746[#17746] (issues: {issue}11513[#11513], {issue}16751[#16751], {issue}17007[#17007], {issue}17700[#17700]) +* Simplify AllEntries, AllField and AllFieldMapper: {pull}17613[#17613] + +Network:: +* Limit request size {pull}17133[#17133] (issue: {issue}16011[#16011]) + +Packaging:: +* Remove unnecessary sleep from init script restart {pull}17966[#17966] +* Explicitly set packaging permissions {pull}17912[#17912] (issue: {issue}17634[#17634]) +* Allow configuring Windows service name, description and user {pull}17312[#17312] +* rpm uses non-portable `--system` flag to `useradd` {pull}14596[#14596] (issue: {issue}14211[#14211]) + +Percolator:: +* PercolatorQueryBuilder cleanup by using MemoryIndex#fromDocument(...) helper {pull}17669[#17669] (issue: {issue}9386[#9386]) + +Plugins:: +* Cli: Improve output for usage errors {pull}17938[#17938] +* Cli: Add verbose output with zip url when installing plugin {pull}17662[#17662] (issue: {issue}17529[#17529]) + +Query DSL:: +* Add MatchNoDocsQuery, a query that matches no documents and prints the reason why in the toString method. {pull}17780[#17780] +* Adds `ignore_unmapped` option to geo queries {pull}17751[#17751] +* Adds `ignore_unmapped` option to nested and P/C queries {pull}17748[#17748] +* SimpleQueryParser should call MappedFieldType.termQuery when appropriate. {pull}17678[#17678] + +REST:: +* Allow JSON with unquoted field names by enabling system property {pull}17801[#17801] (issue: {issue}17674[#17674]) + +Recovery:: +* TransportNodesListGatewayStartedShards should fall back to disk based index metadata if not found in cluster state {pull}17663[#17663] (issue: {issue}17630[#17630]) + +Reindex API:: +* Properly mark reindex's child tasks as child tasks {pull}17770[#17770] + +Search:: +* Fail query if it contains very large rescores {pull}17917[#17917] (issue: {issue}17522[#17522]) + +Settings:: +* Switch to registered Settings for all IndexingMemoryController settings {pull}17778[#17778] (issue: {issue}17442[#17442]) + +Stats:: +* Add points to SegmentStats. {pull}17775[#17775] (issue: {issue}16974[#16974]) +* Remove FieldStats.Float. {pull}17749[#17749] +* Show configured and remaining delay for an unassigned shard. {pull}17515[#17515] (issue: {issue}17372[#17372]) + +Store:: +* Use `mmapfs` by default. {pull}17616[#17616] (issue: {issue}16983[#16983]) + +Suggesters:: +* Add bwc support for reading pre-5.0 completion index {pull}17602[#17602] + +Task Manager:: +* Move parentTaskId into TransportRequest {pull}17872[#17872] +* Shorten the serialization of the empty TaskId {pull}17870[#17870] +* Expose whether a task is cancellable in the _tasks list API {pull}17464[#17464] (issue: {issue}17369[#17369]) + + + +[[bug-5.0.0-alpha2]] +[float] +=== Bug fixes + +Aggregations:: +* Adds serialisation of sigma to extended_stats_bucket pipeline aggregation {pull}17703[#17703] (issue: {issue}17701[#17701]) +* Fixes NPE when no window is specified in moving average request {pull}17556[#17556] (issue: {issue}17516[#17516]) +* Fixes Filter and FiltersAggregation to work with empty query {pull}17542[#17542] (issue: {issue}17518[#17518]) +* ExtendedStatsAggregator should also pass sigma to emtpy aggs. 
{pull}17388[#17388] (issue: {issue}17362[#17362]) + +Allocation:: +* Rebalancing policy shouldn't prevent hard allocation decisions {pull}17698[#17698] (issues: {issue}14057[#14057], {issue}14259[#14259]) +* When considering the size of shadow replica shards, set size to 0 {pull}17509[#17509] (issue: {issue}17460[#17460]) + +Core:: +* Refactor UUID-generating methods out of Strings {pull}17837[#17837] (issue: {issue}17819[#17819]) +* Node names cleanup {pull}17723[#17723] (issue: {issue}17718[#17718]) +* NullPointerException from IndexingMemoryController when a version conflict happens during recovery {pull}17569[#17569] + +Ingest:: +* Ingest does not close its factories {pull}17626[#17626] (issue: {issue}17625[#17625]) + +Internal:: +* Fix BulkItemResponse.Failure.toString {pull}17871[#17871] + +Logging:: +* Add missing index name to search slow log. {pull}17818[#17818] (issue: {issue}17025[#17025]) + +Mapping:: +* Fix cross type mapping updates for `boolean` fields. {pull}17882[#17882] (issue: {issue}17879[#17879]) +* Fix dynamic check to properly handle parents {pull}17864[#17864] (issues: {issue}17644[#17644], {issue}17854[#17854]) +* Fix array parsing to remove its context when finished parsing {pull}17768[#17768] +* Disallow fielddata loading on text fields that are not indexed. {pull}17747[#17747] +* Fail if an object is added after a field with the same name. {pull}17568[#17568] (issue: {issue}17567[#17567]) + +Packaging:: +* Fix exit code {pull}17082[#17082] + +Plugin Discovery EC2:: +* Fix EC2 Discovery settings {pull}17651[#17651] (issue: {issue}16602[#16602]) + +Plugins:: +* Quote path to java binary {pull}17496[#17496] (issue: {issue}17495[#17495]) + +Query DSL:: +* Apply the default operator on analyzed wildcard in simple_query_string builder {pull}17776[#17776] +* Apply the default operator on analyzed wildcard in query_string builder: {pull}17711[#17711] (issue: {issue}2183[#2183]) + +REST:: +* Fixes reading of CORS pre-flight headers and methods {pull}17523[#17523] (issue: {issue}17483[#17483]) +* index is a required url part for update by query {pull}17503[#17503] + +Reindex API:: +* Reindex should never report negative throttled_until {pull}17799[#17799] (issue: {issue}17783[#17783]) +* Reindex should gracefully handle when _source is disabled {pull}17667[#17667] (issue: {issue}17666[#17666]) + +Settings:: +* convert settings for ResourceWatcherService to new infrastructure {pull}17948[#17948] + +Snapshot/Restore:: +* Fix the semantics for the BlobContainer interface {pull}17878[#17878] (issues: {issue}15579[#15579], {issue}15580[#15580]) +* On restore, selecting concrete indices can select wrong index {pull}17715[#17715] + +Task Manager:: +* Shard level tasks in Bulk Action lose reference to their parent tasks {pull}17743[#17743] + +Term Vectors:: +* Fix calculation of took time of term vectors request {pull}17817[#17817] (issue: {issue}12565[#12565]) + + + +[[upgrade-5.0.0-alpha2]] +[float] +=== Upgrades + +Core:: +* Upgrade to lucene 6 release {pull}17657[#17657] + From 28409e45090a74a4d480dfcd34e399f7f39ed06e Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 2 May 2016 09:07:25 -0400 Subject: [PATCH 0035/1311] Add support for .empty to expressions, and some docs improvements Closes #18077 --- .../modules/scripting/scripting.asciidoc | 19 ++++- .../expression/CountMethodValueSource.java | 2 +- .../expression/EmptyMemberValueSource.java | 83 +++++++++++++++++++ .../ExpressionScriptEngineService.java | 26 +++++- .../expression/MoreExpressionTests.java | 26 +++++- 5 
files changed, 145 insertions(+), 11 deletions(-) create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java diff --git a/docs/reference/modules/scripting/scripting.asciidoc b/docs/reference/modules/scripting/scripting.asciidoc index 047ad0aa136..5935554a368 100644 --- a/docs/reference/modules/scripting/scripting.asciidoc +++ b/docs/reference/modules/scripting/scripting.asciidoc @@ -455,11 +455,25 @@ for details on what operators and functions are available. Variables in `expression` scripts are available to access: -* Single valued document fields, e.g. `doc['myfield'].value` -* Single valued document fields can also be accessed without `.value` e.g. `doc['myfield']` +* document fields, e.g. `doc['myfield'].value` or just `doc['myfield']`. +* whether the field is empty, e.g. `doc['myfield'].empty` * Parameters passed into the script, e.g. `mymodifier` * The current document's score, `_score` (only available when used in a `script_score`) +When a document is missing the field completely, by default the value will be treated as `0`. +You can treat it as another value instead, e.g. `doc['myfield'].empty ? 100 : doc['myfield'].value` + +When a document has multiple values for the field, by default the minimum value is returned. +You can choose a different value instead, e.g. `doc['myfield'].sum()`. The following methods are available +for any field: + +* min() +* max() +* avg() +* median() +* sum() +* count() + Variables in `expression` scripts that are of type `date` may use the following member methods: * getYear() @@ -477,7 +491,6 @@ There are a few limitations relative to other script languages: * Only numeric fields may be accessed * Stored fields are not available -* If a field is sparse (only some documents contain a value), documents missing the field will have a value of `0` [float] === Score diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java index 043a11eebad..6f397c02bd3 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java @@ -63,7 +63,7 @@ public class CountMethodValueSource extends ValueSource { @Override public int hashCode() { - return fieldData.hashCode(); + return 31 * getClass().hashCode() + fieldData.hashCode(); } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java new file mode 100644 index 00000000000..b8c101e8abc --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.queries.function.FunctionValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+
+/**
+ * ValueSource to return non-zero if a field is missing.
+ * <p>
+ * This is essentially sugar over !count()
+ */
+public class EmptyMemberValueSource extends ValueSource {
+    protected IndexFieldData<?> fieldData;
+
+    protected EmptyMemberValueSource(IndexFieldData<?> fieldData) {
+        this.fieldData = Objects.requireNonNull(fieldData);
+    }
+
+    @Override
+    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
+    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
+        AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf);
+        final SortedNumericDoubleValues values = leafData.getDoubleValues();
+        return new DoubleDocValues(this) {
+            @Override
+            public double doubleVal(int doc) {
+                values.setDocument(doc);
+                if (values.count() == 0) {
+                    return 1;
+                } else {
+                    return 0;
+                }
+            }
+        };
+    }
+
+    @Override
+    public int hashCode() {
+        return 31 * getClass().hashCode() + fieldData.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) return true;
+        if (obj == null) return false;
+        if (getClass() != obj.getClass()) return false;
+        EmptyMemberValueSource other = (EmptyMemberValueSource) obj;
+        if (!fieldData.equals(other.fieldData)) return false;
+        return true;
+    }
+
+    @Override
+    public String description() {
+        return "empty: field(" + fieldData.getFieldName() + ")";
+    }
+}
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
index d78f80bfe49..2bfd2928d57 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java
@@ -65,6 +65,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
 
     public static final List<String> TYPES = Collections.singletonList(NAME);
 
+    // these methods only work on dates, e.g. doc['datefield'].getYear()
     protected static final String GET_YEAR_METHOD = "getYear";
     protected static final String GET_MONTH_METHOD = "getMonth";
     protected static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth";
@@ -72,6 +73,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
     protected static final String GET_MINUTES_METHOD = "getMinutes";
     protected static final String GET_SECONDS_METHOD = "getSeconds";
 
+    // these methods work on any field, e.g. doc['field'].sum()
     protected static final String MINIMUM_METHOD = "min";
     protected static final String MAXIMUM_METHOD = "max";
     protected static final String AVERAGE_METHOD = "avg";
@@ -79,6 +81,10 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
     protected static final String SUM_METHOD = "sum";
     protected static final String COUNT_METHOD = "count";
 
+    // these variables work on any field, e.g. doc['field'].value
+    protected static final String VALUE_VARIABLE = "value";
+    protected static final String EMPTY_VARIABLE = "empty";
+
     @Inject
     public ExpressionScriptEngineService(Settings settings) {
         super(settings);
@@ -169,6 +175,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
             } else {
                 String fieldname = null;
                 String methodname = null;
+                String variablename = VALUE_VARIABLE;  // .value is the default for doc['field'], it's optional.
VariableContext[] parts = VariableContext.parse(variable); if (parts[0].text.equals("doc") == false) { throw new ScriptException("Unknown variable [" + parts[0].text + "] in expression"); @@ -181,8 +188,10 @@ public class ExpressionScriptEngineService extends AbstractComponent implements if (parts.length == 3) { if (parts[2].type == VariableContext.Type.METHOD) { methodname = parts[2].text; - } else if (parts[2].type != VariableContext.Type.MEMBER || !"value".equals(parts[2].text)) { - throw new ScriptException("Only the member variable [value] or member methods may be accessed on a field when not accessing the field directly"); + } else if (parts[2].type == VariableContext.Type.MEMBER) { + variablename = parts[2].text; + } else { + throw new ScriptException("Only member variables or member methods may be accessed on a field when not accessing the field directly"); } } if (parts.length > 3) { @@ -201,7 +210,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements throw new ScriptException("Field [" + fieldname + "] used in expression must be numeric"); } if (methodname == null) { - bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN)); + bindings.add(variable, getVariableValueSource(fieldType, fieldData, fieldname, variablename)); } else { bindings.add(variable, getMethodValueSource(fieldType, fieldData, fieldname, methodname)); } @@ -245,6 +254,17 @@ public class ExpressionScriptEngineService extends AbstractComponent implements throw new IllegalArgumentException("Member method [" + methodName + "] does not exist."); } } + + protected ValueSource getVariableValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String memberName) { + switch (memberName) { + case VALUE_VARIABLE: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case EMPTY_VARIABLE: + return new EmptyMemberValueSource(fieldData); + default: + throw new IllegalArgumentException("Member variable [" + memberName + "] does not exist."); + } + } protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String methodName, int calendarType) { if (fieldType instanceof LegacyDateFieldMapper.DateFieldType == false diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index b53245fda20..50a9900e3d9 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -164,10 +164,10 @@ public class MoreExpressionTests extends ESIntegTestCase { } public void testMultiValueMethods() throws Exception { - ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double")); + ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double")); ensureGreen("test"); indexRandom(true, - client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4"), + client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4", "double2", "3.0"), client().prepareIndex("test", "doc", 
"2").setSource("double0", "5.0", "double1", "3.0"), client().prepareIndex("test", "doc", "3").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double0", "-1.5", "double1", "4.0")); @@ -227,6 +227,24 @@ public class MoreExpressionTests extends ESIntegTestCase { assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); + + // make sure count() works for missing + rsp = buildRequest("doc['double2'].count()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); + assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); + assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); + + // make sure .empty works in the same way + rsp = buildRequest("doc['double2'].empty ? 5.0 : 2.0").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); + assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); + assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); } public void testInvalidDateMethodCall() throws Exception { @@ -363,8 +381,8 @@ public class MoreExpressionTests extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString() + "should have contained member variable [value] or member methods may be accessed", - e.toString().contains("member variable [value] or member methods may be accessed"), equalTo(true)); + assertThat(e.toString() + "should have contained member variable [bogus] does not exist", + e.toString().contains("Member variable [bogus] does not exist"), equalTo(true)); } } From 372eceb8542d6928db8193649716ca9a6a3ac501 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Mon, 2 May 2016 15:41:05 +0200 Subject: [PATCH 0036/1311] Switch to using predicate for testing existing value --- .../search/sort/FieldSortBuilderTests.java | 1 + .../search/sort/ScriptSortBuilderTests.java | 2 +- .../java/org/elasticsearch/test/ESTestCase.java | 15 ++++++++------- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index cab40588777..ebd5403489b 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.function.Supplier; public class FieldSortBuilderTests extends AbstractSortTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 1d7247ba1e4..b809adcb27b 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -60,7 +60,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); + 
builder.sortMode(ESTestCase.randomValueOtherThanMany(exceptThis::contains, () -> randomFrom(SortMode.values()))); } } if (randomBoolean()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index d8a3221c88f..01f0d5b8151 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -87,6 +87,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.function.BooleanSupplier; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.function.Supplier; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; @@ -410,21 +411,21 @@ public abstract class ESTestCase extends LuceneTestCase { * helper to get a random value in a certain range that's different from the input */ public static <T> T randomValueOtherThan(T input, Supplier<T> randomSupplier) { - T randomValue = null; - do { - randomValue = randomSupplier.get(); - } while (randomValue.equals(input)); - return randomValue; + if (input != null) { + return randomValueOtherThanMany(input::equals, randomSupplier); + } + + return randomSupplier.get(); } /** * helper to get a random value that doesn't match the given predicate */ - public static <T> T randomValueOtherThanMany(Collection<T> input, Supplier<T> randomSupplier) { + public static <T> T randomValueOtherThanMany(Predicate<T> input, Supplier<T> randomSupplier) { T randomValue = null; do { randomValue = randomSupplier.get(); - } while (input.contains(randomValue)); + } while (input.test(randomValue)); return randomValue; } From 8e178c4f8e0321929b4147c1c6e1d22e488bea5d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 2 May 2016 10:59:50 -0400 Subject: [PATCH 0037/1311] Add system bootstrap checks escape hatch Today when running in production mode the bootstrap checks are completely unforgiving. But there are cases where an end-user might not have the ability to modify some of the system-level settings that cause the bootstrap checks to trip (e.g., guest settings that are inherited from a host and cannot be modified). This commit adds a setting that allows system-level bootstrap checks to be ignored for these end-users. We classify certain bootstrap checks into system-level checks and only those bootstrap checks will be ignored if this flag is enabled. All other bootstrap checks are still subject to being enforced if the user is in production mode. We will still log warnings for these bootstrap checks because the end-user does still need to be made aware that they are running in a configuration that is less-than-ideal from a resiliency perspective.
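For example, an end-user who cannot raise host-imposed limits can set `bootstrap.ignore_system_bootstrap_checks: true` in elasticsearch.yml to demote just the system-level checks (the file descriptor, mlockall, max number of threads, max size virtual memory, and max map count checks) to warnings, while the Elasticsearch-level checks (e.g., the heap size and minimum master nodes checks) remain enforced.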
Relates #18088 --- .../bootstrap/BootstrapCheck.java | 121 +++++++++++++++--- .../bootstrap/BootstrapSettings.java | 2 + .../common/settings/ClusterSettings.java | 1 + .../bootstrap/BootstrapCheckTests.java | 90 ++++++++++--- 4 files changed, 175 insertions(+), 39 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 2f87086ede4..25bdbe3fa8a 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -41,7 +41,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.stream.Collectors; /** * We enforce limits once any network host is configured. In this case we assume the node is running in production @@ -63,42 +62,80 @@ final class BootstrapCheck { * @param boundTransportAddress the node network bindings */ static void check(final Settings settings, final BoundTransportAddress boundTransportAddress) { - check(enforceLimits(boundTransportAddress), checks(settings), Node.NODE_NAME_SETTING.get(settings)); + check( + enforceLimits(boundTransportAddress), + BootstrapSettings.IGNORE_SYSTEM_BOOTSTRAP_CHECKS.get(settings), + checks(settings), + Node.NODE_NAME_SETTING.get(settings)); } /** * executes the provided checks and fails the node if * enforceLimits is true, otherwise logs warnings * - * @param enforceLimits true if the checks should be enforced or - * warned - * @param checks the checks to execute - * @param nodeName the node name to be used as a logging prefix + * @param enforceLimits true if the checks should be enforced or + * otherwise warned + * @param ignoreSystemChecks true if system checks should be ignored + * or otherwise enforced + * @param checks the checks to execute + * @param nodeName the node name to be used as a logging prefix */ // visible for testing - static void check(final boolean enforceLimits, final List<Check> checks, final String nodeName) { - final ESLogger logger = Loggers.getLogger(BootstrapCheck.class, nodeName); + static void check(final boolean enforceLimits, final boolean ignoreSystemChecks, final List<Check> checks, final String nodeName) { + check(enforceLimits, ignoreSystemChecks, checks, Loggers.getLogger(BootstrapCheck.class, nodeName)); + } - final List<String> errors = - checks.stream() - .filter(BootstrapCheck.Check::check) - .map(BootstrapCheck.Check::errorMessage) - .collect(Collectors.toList()); + /** + * executes the provided checks and fails the node if + * enforceLimits is true, otherwise logs warnings + * + * @param enforceLimits true if the checks should be enforced or + * otherwise warned + * @param ignoreSystemChecks true if system checks should be ignored + * or otherwise enforced + * @param checks the checks to execute + * @param logger the logger to log any warnings to + */ + static void check( + final boolean enforceLimits, + final boolean ignoreSystemChecks, + final List<Check> checks, + final ESLogger logger) { + final List<String> errors = new ArrayList<>(); + final List<String> ignoredErrors = new ArrayList<>(); - if (!errors.isEmpty()) { - final List<String> messages = new ArrayList<>(1 + errors.size()); - messages.add("bootstrap checks failed"); - messages.addAll(errors); - if (enforceLimits) { + for (final Check check : checks) { + if (check.check()) { + if (!enforceLimits || (check.isSystemCheck() && ignoreSystemChecks)) { + ignoredErrors.add(check.errorMessage()); + } else { + errors.add(check.errorMessage()); + } + } + } + +
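// ignored failures are only logged as warnings below; any errors that remain fail the node
+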
if (!errors.isEmpty() || !ignoredErrors.isEmpty()) { + + if (!ignoredErrors.isEmpty()) { + ignoredErrors.forEach(error -> log(logger, error)); + } + + if (!errors.isEmpty()) { + final List messages = new ArrayList<>(1 + errors.size()); + messages.add("bootstrap checks failed"); + messages.addAll(errors); final RuntimeException re = new RuntimeException(String.join("\n", messages)); errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed); throw re; - } else { - messages.forEach(message -> logger.warn(message)); } + } } + static void log(final ESLogger logger, final String error) { + logger.warn(error); + } + /** * Tests if the checks should be enforced * @@ -151,6 +188,14 @@ final class BootstrapCheck { */ String errorMessage(); + /** + * test if the check is a system-level check + * + * @return true if the check is a system-level check as opposed + * to an Elasticsearch-level check + */ + boolean isSystemCheck(); + } static class HeapSizeCheck implements BootstrapCheck.Check { @@ -183,6 +228,11 @@ final class BootstrapCheck { return JvmInfo.jvmInfo().getConfiguredMaxHeapSize(); } + @Override + public final boolean isSystemCheck() { + return false; + } + } static class OsXFileDescriptorCheck extends FileDescriptorCheck { @@ -233,6 +283,11 @@ final class BootstrapCheck { return ProcessProbe.getInstance().getMaxFileDescriptorCount(); } + @Override + public final boolean isSystemCheck() { + return true; + } + } // visible for testing @@ -259,6 +314,11 @@ final class BootstrapCheck { return Natives.isMemoryLocked(); } + @Override + public final boolean isSystemCheck() { + return true; + } + } static class MinMasterNodesCheck implements Check { @@ -279,6 +339,12 @@ final class BootstrapCheck { return "please set [" + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + "] to a majority of the number of master eligible nodes in your cluster."; } + + @Override + public final boolean isSystemCheck() { + return false; + } + } static class MaxNumberOfThreadsCheck implements Check { @@ -305,6 +371,11 @@ final class BootstrapCheck { return JNANatives.MAX_NUMBER_OF_THREADS; } + @Override + public final boolean isSystemCheck() { + return true; + } + } static class MaxSizeVirtualMemoryCheck implements Check { @@ -333,6 +404,11 @@ final class BootstrapCheck { return JNANatives.MAX_SIZE_VIRTUAL_MEMORY; } + @Override + public final boolean isSystemCheck() { + return true; + } + } static class MaxMapCountCheck implements Check { @@ -396,6 +472,11 @@ final class BootstrapCheck { return Long.parseLong(procSysVmMaxMapCount); } + @Override + public boolean isSystemCheck() { + return true; + } + } } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java index 4e9dffc995b..b2059380181 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapSettings.java @@ -37,5 +37,7 @@ public final class BootstrapSettings { Setting.boolSetting("bootstrap.seccomp", true, Property.NodeScope); public static final Setting CTRLHANDLER_SETTING = Setting.boolSetting("bootstrap.ctrlhandler", true, Property.NodeScope); + public static final Setting IGNORE_SYSTEM_BOOTSTRAP_CHECKS = + Setting.boolSetting("bootstrap.ignore_system_bootstrap_checks", false, Property.NodeScope); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java 
b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 55aa58ca588..dad5f48ce27 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -408,6 +408,7 @@ public final class ClusterSettings extends AbstractScopedSettings { BootstrapSettings.MLOCKALL_SETTING, BootstrapSettings.SECCOMP_SETTING, BootstrapSettings.CTRLHANDLER_SETTING, + BootstrapSettings.IGNORE_SYSTEM_BOOTSTRAP_CHECKS, IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING, diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java index 9f6a4e25eb5..235957ac18b 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; +import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -38,6 +39,8 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class BootstrapCheckTests extends ESTestCase { @@ -113,6 +116,11 @@ public class BootstrapCheckTests extends ESTestCase { public String errorMessage() { return "first"; } + + @Override + public boolean isSystemCheck() { + return false; + } }, new BootstrapCheck.Check() { @Override @@ -124,11 +132,16 @@ public class BootstrapCheckTests extends ESTestCase { public String errorMessage() { return "second"; } + + @Override + public boolean isSystemCheck() { + return false; + } } ); final RuntimeException e = - expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, checks, "testExceptionAggregation")); + expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, false, checks, "testExceptionAggregation")); assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second")))); final Throwable[] suppressed = e.getSuppressed(); assertThat(suppressed.length, equalTo(2)); @@ -159,7 +172,7 @@ public class BootstrapCheckTests extends ESTestCase { final RuntimeException e = expectThrows( RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck")); + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck")); assertThat( e.getMessage(), containsString("initial heap size [" + initialHeapSize.get() + "] " + @@ -167,7 +180,7 @@ public class BootstrapCheckTests extends ESTestCase { initialHeapSize.set(maxHeapSize.get()); - BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck"); // nothing should happen if the initial heap size or the max // heap size is not available @@ -176,7 +189,7 @@ public class BootstrapCheckTests extends ESTestCase { } else { 
maxHeapSize.set(0); } - BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck"); } public void testFileDescriptorLimits() { @@ -202,17 +215,17 @@ public class BootstrapCheckTests extends ESTestCase { final RuntimeException e = expectThrows(RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits")); + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits")); assertThat(e.getMessage(), containsString("max file descriptors")); maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits"); // nothing should happen if current file descriptor count is // not available maxFileDescriptorCount.set(-1); - BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits"); } public void testFileDescriptorLimitsThrowsOnInvalidLimit() { @@ -255,13 +268,17 @@ public class BootstrapCheckTests extends ESTestCase { if (testCase.shouldFail) { final RuntimeException e = expectThrows( RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit")); + () -> BootstrapCheck.check( + true, + false, + Collections.singletonList(check), + "testFileDescriptorLimitsThrowsOnInvalidLimit")); assertThat( e.getMessage(), containsString("memory locking requested for elasticsearch process but memory is not locked")); } else { // nothing should happen - BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit"); } } } @@ -278,17 +295,17 @@ public class BootstrapCheckTests extends ESTestCase { final RuntimeException e = expectThrows( RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck")); + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck")); assertThat(e.getMessage(), containsString("max number of threads")); maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); // nothing should happen if current max number of threads is // not available maxNumberOfThreads.set(-1); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); } public void testMaxSizeVirtualMemory() { @@ -309,17 +326,17 @@ public class BootstrapCheckTests extends ESTestCase { final RuntimeException e = expectThrows( RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory")); + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory")); assertThat(e.getMessage(), containsString("max size virtual 
memory")); maxSizeVirtualMemory.set(rlimInfinity); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory"); // nothing should happen if max size virtual memory is not // available maxSizeVirtualMemory.set(Long.MIN_VALUE); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory"); } public void testMaxMapCountCheck() { @@ -334,17 +351,17 @@ public class BootstrapCheckTests extends ESTestCase { RuntimeException e = expectThrows( RuntimeException.class, - () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck")); + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck")); assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count")); maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck"); // nothing should happen if current vm.max_map_count is not // available maxMapCount.set(-1); - BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck"); } public void testMinMasterNodes() { @@ -353,7 +370,42 @@ public class BootstrapCheckTests extends ESTestCase { assertThat(check.check(), not(equalTo(isSet))); List defaultChecks = BootstrapCheck.checks(Settings.EMPTY); - expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, defaultChecks, "testMinMasterNodes")); + expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, false, defaultChecks, "testMinMasterNodes")); + } + + public void testIgnoringSystemChecks() { + BootstrapCheck.Check check = new BootstrapCheck.Check() { + @Override + public boolean check() { + return true; + } + + @Override + public String errorMessage() { + return "error"; + } + + @Override + public boolean isSystemCheck() { + return true; + } + }; + + final RuntimeException notIgnored = expectThrows( + RuntimeException.class, + () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testIgnoringSystemChecks")); + assertThat(notIgnored, hasToString(containsString("error"))); + + final ESLogger logger = mock(ESLogger.class); + + // nothing should happen if we ignore system checks + BootstrapCheck.check(true, true, Collections.singletonList(check), logger); + verify(logger).warn("error"); + reset(logger); + + // nothing should happen if we ignore all checks + BootstrapCheck.check(false, randomBoolean(), Collections.singletonList(check), logger); + verify(logger).warn("error"); } } From 8bdda49f907a45f9704377bc81dcd798ec1f48d2 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 2 May 2016 10:58:45 +0200 Subject: [PATCH 0038/1311] Fail automatic string upgrade if the value of `index` is not recognized. 
#18082 Closes #18062 --- .../index/mapper/core/StringFieldMapper.java | 6 ++++-- .../mapper/core/StringMappingUpgradeTests.java | 16 ++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 65d11b45475..0c98dd10c3e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -29,7 +29,6 @@ import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -204,6 +203,9 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) { final Object index = node.get("index"); + if (Arrays.asList(null, "no", "not_analyzed", "analyzed").contains(index) == false) { + throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); + } final boolean keyword = index != null && "analyzed".equals(index) == false; // Automatically upgrade simple mappings for ease of upgrade, otherwise fail @@ -283,7 +285,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc node.put("index", false); break; default: - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]"); + throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); } } final Object fielddataObject = node.get("fielddata"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java index a8dd1b65fcf..41094fec58a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/StringMappingUpgradeTests.java @@ -103,6 +103,22 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase { assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); } + public void testIllegalIndexValue() throws IOException { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "string") + .field("index", false) + .endObject() + .endObject() .endObject().endObject().string(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertThat(e.getMessage(), + containsString("Can't parse [index] value [false] for field [field], expected [no], [not_analyzed] or [analyzed]")); + } + public void 
testNotSupportedUpgrade() throws IOException { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); From 226679c606098888e233309e9816aaeb0fbea1ef Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 2 May 2016 11:48:35 -0400 Subject: [PATCH 0039/1311] Mark MaxMapCountCheck#isSystemCheck as final This commit marks the method MaxMapCountCheck#isSystemCheck as final because this method should not be overridden by inheriting classes (the check is extended in tests). --- .../main/java/org/elasticsearch/bootstrap/BootstrapCheck.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 25bdbe3fa8a..72f5a9a6bdc 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -473,7 +473,7 @@ final class BootstrapCheck { } @Override - public boolean isSystemCheck() { + public final boolean isSystemCheck() { return true; } From 15f33e61b74d4c65291f0ad157f89bd898f3ab96 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 2 May 2016 12:22:40 -0400 Subject: [PATCH 0040/1311] Kill redundant conditional in BootstrapCheck#check This commit removes an unnecessary if statement in BootstrapCheck#check. The removed if statement was duplicating the conditionals in the nested if statements and was merely an artifact of an earlier refactoring. --- .../bootstrap/BootstrapCheck.java | 27 +++++++++---------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 72f5a9a6bdc..626e274076c 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -114,22 +114,19 @@ final class BootstrapCheck { } } - if (!errors.isEmpty() || !ignoredErrors.isEmpty()) { - - if (!ignoredErrors.isEmpty()) { - ignoredErrors.forEach(error -> log(logger, error)); - } - - if (!errors.isEmpty()) { - final List<String> messages = new ArrayList<>(1 + errors.size()); - messages.add("bootstrap checks failed"); - messages.addAll(errors); - final RuntimeException re = new RuntimeException(String.join("\n", messages)); - errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed); - throw re; - } - + if (!ignoredErrors.isEmpty()) { + ignoredErrors.forEach(error -> log(logger, error)); } + + if (!errors.isEmpty()) { + final List<String> messages = new ArrayList<>(1 + errors.size()); + messages.add("bootstrap checks failed"); + messages.addAll(errors); + final RuntimeException re = new RuntimeException(String.join("\n", messages)); + errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed); + throw re; + } + } static void log(final ESLogger logger, final String error) { From 16c4c251f5d94a34957d735647fdd665b97f4406 Mon Sep 17 00:00:00 2001 From: Michael McCandless Date: Mon, 2 May 2016 14:40:08 -0400 Subject: [PATCH 0041/1311] Merge pull request #18094 from mikemccand/completion_stats_once Don't try to compute completion stats on a reader after we already closed it Conflicts: core/src/main/java/org/elasticsearch/index/shard/IndexShard.java --- .../main/java/org/elasticsearch/index/shard/IndexShard.java | 3 --- 1 file changed, 3 deletions(-) diff --git
a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8739c265701..ae6f1b6bb82 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -680,9 +680,6 @@ public class IndexShard extends AbstractIndexShardComponent { CompletionStats completionStats = new CompletionStats(); try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) { completionStats.add(CompletionFieldStats.completionStats(currentSearcher.reader(), fields)); - Completion090PostingsFormat postingsFormat = ((Completion090PostingsFormat) - PostingsFormat.forName(Completion090PostingsFormat.CODEC_NAME)); - completionStats.add(postingsFormat.completionStats(currentSearcher.reader(), fields)); } return completionStats; } From 0c6d6a5495d6a99866c5ea7acb99b821e93223e1 Mon Sep 17 00:00:00 2001 From: Mike McCandless Date: Mon, 2 May 2016 16:14:05 -0400 Subject: [PATCH 0042/1311] also compute completion stats for 2.x shards --- .../main/java/org/elasticsearch/index/shard/IndexShard.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ae6f1b6bb82..c1610e9bb1f 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -680,6 +680,10 @@ public class IndexShard extends AbstractIndexShardComponent { CompletionStats completionStats = new CompletionStats(); try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) { completionStats.add(CompletionFieldStats.completionStats(currentSearcher.reader(), fields)); + // Necessary for 2.x shards: + Completion090PostingsFormat postingsFormat = ((Completion090PostingsFormat) + PostingsFormat.forName(Completion090PostingsFormat.CODEC_NAME)); + completionStats.add(postingsFormat.completionStats(currentSearcher.reader(), fields)); } return completionStats; } From 693c1f667142f322e6faaf1abacee79c66660b37 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 2 May 2016 17:49:21 -0400 Subject: [PATCH 0043/1311] Support geo_point fields in lucene expressions. 
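For example, an expression can now rank documents by distance from a point with `haversin(38.9072, -77.0369, doc['location'].lat, doc['location'].lon)`, and `doc['location'].empty` tests whether a document has no value for the geo_point field (`location` is a hypothetical field name here; see the documentation changes below).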
Closes #18096 --- .../modules/scripting/scripting.asciidoc | 91 +++++++++++--- .../expression/CountMethodFunctionValues.java | 44 ------- .../expression/CountMethodValueSource.java | 21 ++-- .../script/expression/DateField.java | 94 +++++++++++++++ .../expression/DateMethodFunctionValues.java | 47 -------- .../expression/DateMethodValueSource.java | 26 ++-- .../expression/EmptyMemberValueSource.java | 6 +- .../ExpressionScriptEngineService.java | 113 +++++------------- .../expression/FieldDataFunctionValues.java | 43 ------- .../expression/FieldDataValueSource.java | 27 +++-- .../expression/GeoEmptyValueSource.java | 81 +++++++++++++ .../script/expression/GeoField.java | 53 ++++++++ .../expression/GeoLatitudeValueSource.java | 81 +++++++++++++ .../expression/GeoLongitudeValueSource.java | 81 +++++++++++++ .../script/expression/NumericField.java | 75 ++++++++++++ .../ReplaceableConstFunctionValues.java | 4 +- .../ReplaceableConstValueSource.java | 4 +- .../expression/MoreExpressionTests.java | 44 ++++++- 18 files changed, 666 insertions(+), 269 deletions(-) delete mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java delete mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java delete mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoField.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java create mode 100644 modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java diff --git a/docs/reference/modules/scripting/scripting.asciidoc b/docs/reference/modules/scripting/scripting.asciidoc index 5935554a368..b1f779b48a0 100644 --- a/docs/reference/modules/scripting/scripting.asciidoc +++ b/docs/reference/modules/scripting/scripting.asciidoc @@ -23,7 +23,7 @@ to specify the language of the script. Plugins are available for following langu |groovy |no |built-in |expression |yes |built-in |mustache |yes |built-in -/painless /yes /built-in (module) +|painless |yes |built-in (module) |javascript |no |{plugins}/lang-javascript.html[elasticsearch-lang-javascript] |python |no |{plugins}/lang-python.html[elasticsearch-lang-python] |======================================================================= @@ -455,41 +455,94 @@ for details on what operators and functions are available. Variables in `expression` scripts are available to access: -* document fields, e.g. `doc['myfield'].value` or just `doc['myfield']`. -* whether the field is empty, e.g. `doc['myfield'].empty` +* document fields, e.g. `doc['myfield'].value` +* variables and methods that the field supports, e.g. `doc['myfield'].empty` * Parameters passed into the script, e.g. 
`mymodifier` * The current document's score, `_score` (only available when used in a `script_score`) +[float] +=== Expressions API for numeric fields +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].value` |The native value of the field. For example, +if it's a short type, it will be short. + +|`doc['field_name'].empty` |A boolean indicating if the field has no +values within the doc. + +|`doc['field_name'].min()` |The minimum value of the field in this document. + +|`doc['field_name'].max()` |The maximum value of the field in this document. + +|`doc['field_name'].median()` |The median value of the field in this document. + +|`doc['field_name'].avg()` |The average of the values in this document. + +|`doc['field_name'].sum()` |The sum of the values in this document. + +|`doc['field_name'].count()` |The number of values in this document. +|======================================================================= + When a document is missing the field completely, by default the value will be treated as `0`. You can treat it as another value instead, e.g. `doc['myfield'].empty ? 100 : doc['myfield'].value` When a document has multiple values for the field, by default the minimum value is returned. -You can choose a different value instead, e.g. `doc['myfield'].sum()`. The following methods are available -for any field: +You can choose a different value instead, e.g. `doc['myfield'].sum()`. -* min() -* max() -* avg() -* median() -* sum() -* count() -Variables in `expression` scripts that are of type `date` may use the following member methods: +[float] +=== Additional methods for date fields +Date fields are treated as the number of milliseconds since January 1, 1970 and +support the numeric API above, with these additional methods: -* getYear() -* getMonth() -* getDayOfMonth() -* getHourOfDay() -* getMinutes() -* getSeconds() +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].getYear()` |Year component, e.g. `1970`. + +|`doc['field_name'].getMonth()` |Month component (0-11), e.g. `0` for January. + +|`doc['field_name'].getDayOfMonth()` |Day component, e.g. `1` for the first of the month. + +|`doc['field_name'].getHourOfDay()` |Hour component (0-23) + +|`doc['field_name'].getMinutes()` |Minutes component (0-59) + +|`doc['field_name'].getSeconds()` |Seconds component (0-59) +|======================================================================= The following example shows the difference in years between the `date` fields date0 and date1: `doc['date1'].getYear() - doc['date0'].getYear()` +[float] +=== Expressions API for `geo_point` fields +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].empty` |A boolean indicating if the field has no +values within the doc. + +|`doc['field_name'].lat` |The latitude of the geo point. + +|`doc['field_name'].lon` |The longitude of the geo point. +|======================================================================= + +The following example computes distance in kilometers from Washington, DC: + +`haversin(38.9072, -77.0369, doc['field_name'].lat, doc['field_name'].lon)` + +In this example the coordinates could have been passed as parameters to the script, +e.g.
based on geolocation of the user. + +[float] +=== Expressions limitations + There are a few limitations relative to other script languages: -* Only numeric fields may be accessed +* Only numeric, date, and geo_point fields may be accessed * Stored fields are not available [float] diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java deleted file mode 100644 index 818404e98e2..00000000000 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script.expression; - -import org.apache.lucene.queries.function.ValueSource; -import org.apache.lucene.queries.function.docvalues.DoubleDocValues; -import org.elasticsearch.index.fielddata.AtomicNumericFieldData; -import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; - -/** - * FunctionValues to get the count of the number of values in a field for a document. - */ -public class CountMethodFunctionValues extends DoubleDocValues { - SortedNumericDoubleValues values; - - CountMethodFunctionValues(ValueSource parent, AtomicNumericFieldData fieldData) { - super(parent); - - values = fieldData.getDoubleValues(); - } - - @Override - public double doubleVal(int doc) { - values.setDocument(doc); - return values.count(); - } -} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java index 6f397c02bd3..1aa85ef685b 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java @@ -26,17 +26,18 @@ import java.util.Objects; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; -import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; /** * A ValueSource to create FunctionValues to get the count of the number of values in a field for a document. 
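* For example, the expression {@code doc['myfield'].count()} is backed by this ValueSource.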
*/ -public class CountMethodValueSource extends ValueSource { - protected IndexFieldData fieldData; +final class CountMethodValueSource extends ValueSource { + IndexFieldData fieldData; - protected CountMethodValueSource(IndexFieldData fieldData) { + CountMethodValueSource(IndexFieldData fieldData) { Objects.requireNonNull(fieldData); this.fieldData = fieldData; @@ -45,10 +46,16 @@ public class CountMethodValueSource extends ValueSource { @Override @SuppressWarnings("rawtypes") // ValueSource uses a rawtype public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { - AtomicFieldData leafData = fieldData.load(leaf); - assert(leafData instanceof AtomicNumericFieldData); + AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf); + final SortedNumericDoubleValues values = leafData.getDoubleValues(); - return new CountMethodFunctionValues(this, (AtomicNumericFieldData)leafData); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) { + values.setDocument(doc); + return values.count(); + } + }; } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java new file mode 100644 index 00000000000..e4648887772 --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateField.java @@ -0,0 +1,94 @@ +package org.elasticsearch.script.expression; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.util.Calendar; + +import org.apache.lucene.queries.function.ValueSource; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; + +/** + * Expressions API for date fields. 
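+ * For example, {@code doc['date0'].getYear()} resolves through {@code getMethod} below to a {@code DateMethodValueSource} over {@code Calendar.YEAR}.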
+ */ +final class DateField { + // no instance + private DateField() {} + + // supported variables + static final String VALUE_VARIABLE = "value"; + static final String EMPTY_VARIABLE = "empty"; + + // supported methods + static final String MINIMUM_METHOD = "min"; + static final String MAXIMUM_METHOD = "max"; + static final String AVERAGE_METHOD = "avg"; + static final String MEDIAN_METHOD = "median"; + static final String SUM_METHOD = "sum"; + static final String COUNT_METHOD = "count"; + static final String GET_YEAR_METHOD = "getYear"; + static final String GET_MONTH_METHOD = "getMonth"; + static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth"; + static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; + static final String GET_MINUTES_METHOD = "getMinutes"; + static final String GET_SECONDS_METHOD = "getSeconds"; + + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { + switch (variable) { + case VALUE_VARIABLE: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case EMPTY_VARIABLE: + return new EmptyMemberValueSource(fieldData); + default: + throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "]."); + } + } + + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { + switch (method) { + case MINIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case MAXIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MAX); + case AVERAGE_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.AVG); + case MEDIAN_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN); + case SUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.SUM); + case COUNT_METHOD: + return new CountMethodValueSource(fieldData); + case GET_YEAR_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR); + case GET_MONTH_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH); + case GET_DAY_OF_MONTH_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH); + case GET_HOUR_OF_DAY_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY); + case GET_MINUTES_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE); + case GET_SECONDS_METHOD: + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND); + default: + throw new IllegalArgumentException("Member method [" + method + "] does not exist for date field [" + fieldName + "]."); + } + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java deleted file mode 100644 index 3ed2ed1f0b5..00000000000 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script.expression; - -import org.apache.lucene.queries.function.ValueSource; -import org.elasticsearch.index.fielddata.AtomicNumericFieldData; -import org.elasticsearch.search.MultiValueMode; - -import java.util.Calendar; -import java.util.Locale; -import java.util.TimeZone; - -class DateMethodFunctionValues extends FieldDataFunctionValues { - private final int calendarType; - private final Calendar calendar; - - DateMethodFunctionValues(ValueSource parent, MultiValueMode multiValueMode, AtomicNumericFieldData data, int calendarType) { - super(parent, multiValueMode, data); - - this.calendarType = calendarType; - calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); - } - - @Override - public double doubleVal(int docId) { - long millis = (long)dataAccessor.get(docId); - calendar.setTimeInMillis(millis); - return calendar.get(calendarType); - } -} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java index e6c9dcddc78..98909f4401a 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java @@ -20,20 +20,25 @@ package org.elasticsearch.script.expression; import java.io.IOException; +import java.util.Calendar; +import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.TimeZone; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; -import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.search.MultiValueMode; +/** Extracts a portion of a date field with {@code Calendar.get()} */ class DateMethodValueSource extends FieldDataValueSource { - protected final String methodName; - protected final int calendarType; + final String methodName; + final int calendarType; DateMethodValueSource(IndexFieldData indexFieldData, MultiValueMode multiValueMode, String methodName, int calendarType) { super(indexFieldData, multiValueMode); @@ -47,10 +52,17 @@ class DateMethodValueSource extends FieldDataValueSource { @Override @SuppressWarnings("rawtypes") // ValueSource uses a rawtype public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { - AtomicFieldData leafData = fieldData.load(leaf); - assert(leafData instanceof AtomicNumericFieldData); - - return new DateMethodFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData, calendarType); + AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf); + final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); + 
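// reduce multiple values to one per document (MIN by default for dates), then extract the requested Calendar component in doubleVal
+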
NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int docId) { + long millis = (long)docValues.get(docId); + calendar.setTimeInMillis(millis); + return calendar.get(calendarType); + } + }; } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java index b8c101e8abc..b4c8582e0d6 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java @@ -36,10 +36,10 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; *
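* For example, {@code doc['myfield'].empty ? 100 : doc['myfield'].value} relies on it to substitute a default for documents that are missing the field.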

* This is essentially sugar over !count() */ -public class EmptyMemberValueSource extends ValueSource { - protected IndexFieldData<?> fieldData; +final class EmptyMemberValueSource extends ValueSource { + final IndexFieldData<?> fieldData; - protected EmptyMemberValueSource(IndexFieldData<?> fieldData) { + EmptyMemberValueSource(IndexFieldData<?> fieldData) { this.fieldData = Objects.requireNonNull(fieldData); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 2bfd2928d57..a6ed6253263 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -37,20 +37,19 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; +import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper; import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; -import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.lookup.SearchLookup; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.text.ParseException; -import java.util.Calendar; import java.util.Collections; import java.util.List; import java.util.Map; @@ -65,26 +64,6 @@ public class ExpressionScriptEngineService extends AbstractComponent implements public static final List<String> TYPES = Collections.singletonList(NAME); - // these methods only work on dates, e.g. doc['datefield'].getYear() - protected static final String GET_YEAR_METHOD = "getYear"; - protected static final String GET_MONTH_METHOD = "getMonth"; - protected static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth"; - protected static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay"; - protected static final String GET_MINUTES_METHOD = "getMinutes"; - protected static final String GET_SECONDS_METHOD = "getSeconds"; - - // these methods work on any field, e.g. doc['field'].sum() - protected static final String MINIMUM_METHOD = "min"; - protected static final String MAXIMUM_METHOD = "max"; - protected static final String AVERAGE_METHOD = "avg"; - protected static final String MEDIAN_METHOD = "median"; - protected static final String SUM_METHOD = "sum"; - protected static final String COUNT_METHOD = "count"; - - // these variables work on any field, e.g. doc['field'].value - protected static final String VALUE_VARIABLE = "value"; - protected static final String EMPTY_VARIABLE = "empty"; - @Inject public ExpressionScriptEngineService(Settings settings) { super(settings); @@ -175,7 +154,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } else { String fieldname = null; String methodname = null; - String variablename = VALUE_VARIABLE; // .value is the default for doc['field'], it's optional. + String variablename = "value"; // .value is the default for doc['field'], it's optional.
VariableContext[] parts = VariableContext.parse(variable); if (parts[0].text.equals("doc") == false) { throw new ScriptException("Unknown variable [" + parts[0].text + "] in expression"); @@ -205,15 +184,38 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } IndexFieldData fieldData = lookup.doc().fieldDataService().getForField(fieldType); - if (fieldData instanceof IndexNumericFieldData == false) { - // TODO: more context (which expression?) - throw new ScriptException("Field [" + fieldname + "] used in expression must be numeric"); - } - if (methodname == null) { - bindings.add(variable, getVariableValueSource(fieldType, fieldData, fieldname, variablename)); + + // delegate valuesource creation based on field's type + // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. + + final ValueSource valueSource; + if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) { + // geo + if (methodname == null) { + valueSource = GeoField.getVariable(fieldData, fieldname, variablename); + } else { + valueSource = GeoField.getMethod(fieldData, fieldname, methodname); + } + } else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType || + fieldType instanceof DateFieldMapper.DateFieldType) { + // date + if (methodname == null) { + valueSource = DateField.getVariable(fieldData, fieldname, variablename); + } else { + valueSource = DateField.getMethod(fieldData, fieldname, methodname); + } + } else if (fieldData instanceof IndexNumericFieldData) { + // number + if (methodname == null) { + valueSource = NumericField.getVariable(fieldData, fieldname, variablename); + } else { + valueSource = NumericField.getMethod(fieldData, fieldname, methodname); + } } else { - bindings.add(variable, getMethodValueSource(fieldType, fieldData, fieldname, methodname)); + throw new ScriptException("Field [" + fieldname + "] used in expression must be numeric, date, or geopoint"); } + + bindings.add(variable, valueSource); } } @@ -224,57 +226,6 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } } - protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String methodName) { - switch (methodName) { - case GET_YEAR_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.YEAR); - case GET_MONTH_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MONTH); - case GET_DAY_OF_MONTH_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH); - case GET_HOUR_OF_DAY_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY); - case GET_MINUTES_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MINUTE); - case GET_SECONDS_METHOD: - return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.SECOND); - case MINIMUM_METHOD: - return new FieldDataValueSource(fieldData, MultiValueMode.MIN); - case MAXIMUM_METHOD: - return new FieldDataValueSource(fieldData, MultiValueMode.MAX); - case AVERAGE_METHOD: - return new FieldDataValueSource(fieldData, MultiValueMode.AVG); - case MEDIAN_METHOD: - return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN); - case SUM_METHOD: - return new FieldDataValueSource(fieldData, MultiValueMode.SUM); - case COUNT_METHOD: - return new 
CountMethodValueSource(fieldData); - default: - throw new IllegalArgumentException("Member method [" + methodName + "] does not exist."); - } - } - - protected ValueSource getVariableValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String memberName) { - switch (memberName) { - case VALUE_VARIABLE: - return new FieldDataValueSource(fieldData, MultiValueMode.MIN); - case EMPTY_VARIABLE: - return new EmptyMemberValueSource(fieldData); - default: - throw new IllegalArgumentException("Member variable [" + memberName + "] does not exist."); - } - } - - protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String methodName, int calendarType) { - if (fieldType instanceof LegacyDateFieldMapper.DateFieldType == false - && fieldType instanceof DateFieldMapper.DateFieldType == false) { - throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "]."); - } - - return new DateMethodValueSource(fieldData, MultiValueMode.MIN, methodName, calendarType); - } - @Override public ExecutableScript executable(CompiledScript compiledScript, Map vars) { return new ExpressionExecutableScript(compiledScript, vars); diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java deleted file mode 100644 index b3e06d6b9f2..00000000000 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script.expression; - -import org.apache.lucene.queries.function.ValueSource; -import org.apache.lucene.queries.function.docvalues.DoubleDocValues; -import org.elasticsearch.index.fielddata.AtomicNumericFieldData; -import org.elasticsearch.index.fielddata.NumericDoubleValues; -import org.elasticsearch.search.MultiValueMode; - -/** - * A {@link org.apache.lucene.queries.function.FunctionValues} which wrap field data. 
- */ -class FieldDataFunctionValues extends DoubleDocValues { - NumericDoubleValues dataAccessor; - - FieldDataFunctionValues(ValueSource parent, MultiValueMode m, AtomicNumericFieldData d) { - super(parent); - dataAccessor = m.select(d.getDoubleValues(), 0d); - } - - @Override - public double doubleVal(int i) { - return dataAccessor.get(i); - } -} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java index ae84a5cbfe7..3ac885e49dc 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java @@ -26,9 +26,10 @@ import java.util.Objects; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; -import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.search.MultiValueMode; /** @@ -36,15 +37,12 @@ import org.elasticsearch.search.MultiValueMode; */ class FieldDataValueSource extends ValueSource { - protected IndexFieldData fieldData; - protected MultiValueMode multiValueMode; + final IndexFieldData fieldData; + final MultiValueMode multiValueMode; - protected FieldDataValueSource(IndexFieldData d, MultiValueMode m) { - Objects.requireNonNull(d); - Objects.requireNonNull(m); - - fieldData = d; - multiValueMode = m; + protected FieldDataValueSource(IndexFieldData fieldData, MultiValueMode multiValueMode) { + this.fieldData = Objects.requireNonNull(fieldData); + this.multiValueMode = Objects.requireNonNull(multiValueMode); } @Override @@ -69,9 +67,14 @@ class FieldDataValueSource extends ValueSource { @Override @SuppressWarnings("rawtypes") // ValueSource uses a rawtype public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { - AtomicFieldData leafData = fieldData.load(leaf); - assert(leafData instanceof AtomicNumericFieldData); - return new FieldDataFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData); + AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf); + NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) { + return docValues.get(doc); + } + }; } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java new file mode 100644 index 00000000000..2a9b09ba10d --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.queries.function.FunctionValues; +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; +import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; + +/** + * ValueSource to return non-zero if a field is missing. + */ +final class GeoEmptyValueSource extends ValueSource { + IndexFieldData fieldData; + + GeoEmptyValueSource(IndexFieldData fieldData) { + this.fieldData = Objects.requireNonNull(fieldData); + } + + @Override + @SuppressWarnings("rawtypes") // ValueSource uses a rawtype + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf); + final MultiGeoPointValues values = leafData.getGeoPointValues(); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) { + values.setDocument(doc); + if (values.count() == 0) { + return 1; + } else { + return 0; + } + } + }; + } + + @Override + public int hashCode() { + return 31 * getClass().hashCode() + fieldData.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + GeoEmptyValueSource other = (GeoEmptyValueSource) obj; + if (!fieldData.equals(other.fieldData)) return false; + return true; + } + + @Override + public String description() { + return "empty: field(" + fieldData.getFieldName() + ")"; + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoField.java new file mode 100644 index 00000000000..e830813e3a3 --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoField.java @@ -0,0 +1,53 @@ +package org.elasticsearch.script.expression; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.lucene.queries.function.ValueSource; +import org.elasticsearch.index.fielddata.IndexFieldData; + +/** + * Expressions API for geo_point fields. + */ +final class GeoField { + // no instance + private GeoField() {} + + // supported variables + static final String EMPTY_VARIABLE = "empty"; + static final String LAT_VARIABLE = "lat"; + static final String LON_VARIABLE = "lon"; + + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { + switch (variable) { + case EMPTY_VARIABLE: + return new GeoEmptyValueSource(fieldData); + case LAT_VARIABLE: + return new GeoLatitudeValueSource(fieldData); + case LON_VARIABLE: + return new GeoLongitudeValueSource(fieldData); + default: + throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for geo field [" + fieldName + "]."); + } + } + + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { + throw new IllegalArgumentException("Member method [" + method + "] does not exist for geo field [" + fieldName + "]."); + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java new file mode 100644 index 00000000000..d23eceda2fe --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.queries.function.FunctionValues; +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; +import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; + +/** + * ValueSource to return latitudes as a double "stream" for geopoint fields + */ +final class GeoLatitudeValueSource extends ValueSource { + final IndexFieldData fieldData; + + GeoLatitudeValueSource(IndexFieldData fieldData) { + this.fieldData = Objects.requireNonNull(fieldData); + } + + @Override + @SuppressWarnings("rawtypes") // ValueSource uses a rawtype + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf); + final MultiGeoPointValues values = leafData.getGeoPointValues(); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) { + values.setDocument(doc); + if (values.count() == 0) { + return 0.0; + } else { + return values.valueAt(0).getLat(); + } + } + }; + } + + @Override + public int hashCode() { + return 31 * getClass().hashCode() + fieldData.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + GeoLatitudeValueSource other = (GeoLatitudeValueSource) obj; + if (!fieldData.equals(other.fieldData)) return false; + return true; + } + + @Override + public String description() { + return "lat: field(" + fieldData.getFieldName() + ")"; + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java new file mode 100644 index 00000000000..4fbc8fd936c --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.queries.function.FunctionValues; +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; +import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.MultiGeoPointValues; + +/** + * ValueSource to return longitudes as a double "stream" for geopoint fields + */ +final class GeoLongitudeValueSource extends ValueSource { + final IndexFieldData fieldData; + + GeoLongitudeValueSource(IndexFieldData fieldData) { + this.fieldData = Objects.requireNonNull(fieldData); + } + + @Override + @SuppressWarnings("rawtypes") // ValueSource uses a rawtype + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf); + final MultiGeoPointValues values = leafData.getGeoPointValues(); + return new DoubleDocValues(this) { + @Override + public double doubleVal(int doc) { + values.setDocument(doc); + if (values.count() == 0) { + return 0.0; + } else { + return values.valueAt(0).getLon(); + } + } + }; + } + + @Override + public int hashCode() { + return 31 * getClass().hashCode() + fieldData.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + GeoLongitudeValueSource other = (GeoLongitudeValueSource) obj; + if (!fieldData.equals(other.fieldData)) return false; + return true; + } + + @Override + public String description() { + return "lon: field(" + fieldData.getFieldName() + ")"; + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java new file mode 100644 index 00000000000..4147d0e97d9 --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/NumericField.java @@ -0,0 +1,75 @@ +package org.elasticsearch.script.expression; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.lucene.queries.function.ValueSource; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; + +/** + * Expressions API for numeric fields. 
+ */ +final class NumericField { + // no instance + private NumericField() {} + + // supported variables + static final String VALUE_VARIABLE = "value"; + static final String EMPTY_VARIABLE = "empty"; + + // supported methods + static final String MINIMUM_METHOD = "min"; + static final String MAXIMUM_METHOD = "max"; + static final String AVERAGE_METHOD = "avg"; + static final String MEDIAN_METHOD = "median"; + static final String SUM_METHOD = "sum"; + static final String COUNT_METHOD = "count"; + + static ValueSource getVariable(IndexFieldData fieldData, String fieldName, String variable) { + switch (variable) { + case VALUE_VARIABLE: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case EMPTY_VARIABLE: + return new EmptyMemberValueSource(fieldData); + default: + throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " + + "numeric field [" + fieldName + "]."); + } + } + + static ValueSource getMethod(IndexFieldData fieldData, String fieldName, String method) { + switch (method) { + case MINIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case MAXIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MAX); + case AVERAGE_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.AVG); + case MEDIAN_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN); + case SUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.SUM); + case COUNT_METHOD: + return new CountMethodValueSource(fieldData); + default: + throw new IllegalArgumentException("Member method [" + method + "] does not exist for numeric field [" + fieldName + "]."); + } + } +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java index b3ebfab66ed..eab9131bf10 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstFunctionValues.java @@ -25,10 +25,10 @@ import org.apache.lucene.queries.function.FunctionValues; * A support class for an executable expression script that allows the double returned * by a {@link FunctionValues} to be modified. */ -public class ReplaceableConstFunctionValues extends FunctionValues { +final class ReplaceableConstFunctionValues extends FunctionValues { private double value = 0; - public void setValue(double value) { + void setValue(double value) { this.value = value; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java index bb05ef2325d..ae0f862ca70 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ReplaceableConstValueSource.java @@ -29,10 +29,10 @@ import org.apache.lucene.queries.function.ValueSource; /** * A {@link ValueSource} which has a stub {@link FunctionValues} that holds a dynamically replaceable constant double. 
*/ -class ReplaceableConstValueSource extends ValueSource { +final class ReplaceableConstValueSource extends ValueSource { final ReplaceableConstFunctionValues fv; - public ReplaceableConstValueSource() { + ReplaceableConstValueSource() { fv = new ReplaceableConstFunctionValues(); } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 50a9900e3d9..dbbddfc1424 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -27,12 +27,17 @@ import java.util.Map; import org.apache.lucene.expressions.Expression; import org.apache.lucene.expressions.js.JavascriptCompiler; +import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.update.UpdateRequestBuilder; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; @@ -51,8 +56,10 @@ import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript; @@ -257,8 +264,8 @@ public class MoreExpressionTests extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertThat(e.toString() + "should have contained IllegalArgumentException", e.toString().contains("IllegalArgumentException"), equalTo(true)); - assertThat(e.toString() + "should have contained can only be used with a date field type", - e.toString().contains("can only be used with a date field type"), equalTo(true)); + assertThat(e.toString() + "should have contained does not exist for numeric field", + e.toString().contains("does not exist for numeric field"), equalTo(true)); } } @@ -586,4 +593,37 @@ public class MoreExpressionTests extends ESIntegTestCase { } } } + + public void testGeo() throws Exception { + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location").field("type", "geo_point"); + xContentBuilder.endObject().endObject().endObject().endObject(); + assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); + ensureGreen(); + client().prepareIndex("test", "type1", 
"1").setSource(jsonBuilder().startObject() + .field("name", "test") + .startObject("location").field("lat", 61.5240).field("lon", 105.3188).endObject() + .endObject()).execute().actionGet(); + refresh(); + // access .lat + SearchResponse rsp = buildRequest("doc['location'].lat").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // access .lon + rsp = buildRequest("doc['location'].lon").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // access .empty + rsp = buildRequest("doc['location'].empty ? 1 : 0").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // call haversin + rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); + } } From fff82db6816cd9b9dea918ce6c230881a4c2b51b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 2 May 2016 18:13:03 -0400 Subject: [PATCH 0044/1311] Add tests/doc for boolean fields with expressions --- .../modules/scripting/scripting.asciidoc | 5 +- .../expression/MoreExpressionTests.java | 96 +++++++++++++------ 2 files changed, 69 insertions(+), 32 deletions(-) diff --git a/docs/reference/modules/scripting/scripting.asciidoc b/docs/reference/modules/scripting/scripting.asciidoc index b1f779b48a0..fad96978be9 100644 --- a/docs/reference/modules/scripting/scripting.asciidoc +++ b/docs/reference/modules/scripting/scripting.asciidoc @@ -492,6 +492,9 @@ You can choose a different value instead, e.g. `doc['myfield'].sum()`. When a document is missing the field completely, by default the value will be treated as `0`. +Boolean fields are exposed as numerics, with `true` mapped to `1` and `false` mapped to `0`. +For example: `doc['on_sale'] ? doc['price'] * 0.5 : doc['price']` + [float] === Additional methods for date fields Date fields are treated as the number of milliseconds since January 1, 1970 and @@ -542,7 +545,7 @@ e.g. based on geolocation of the user. 
There are a few limitations relative to other script languages: -* Only numeric, date, and geo_point fields may be accessed +* Only numeric, boolean, date, and geo_point fields may be accessed * Stored fields are not available [float] diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index dbbddfc1424..f023ec9f38a 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -595,35 +595,69 @@ public class MoreExpressionTests extends ESIntegTestCase { } public void testGeo() throws Exception { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location").field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); - ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() - .field("name", "test") - .startObject("location").field("lat", 61.5240).field("lon", 105.3188).endObject() - .endObject()).execute().actionGet(); - refresh(); - // access .lat - SearchResponse rsp = buildRequest("doc['location'].lat").get(); - assertSearchResponse(rsp); - assertEquals(1, rsp.getHits().getTotalHits()); - assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); - // access .lon - rsp = buildRequest("doc['location'].lon").get(); - assertSearchResponse(rsp); - assertEquals(1, rsp.getHits().getTotalHits()); - assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); - // access .empty - rsp = buildRequest("doc['location'].empty ? 1 : 0").get(); - assertSearchResponse(rsp); - assertEquals(1, rsp.getHits().getTotalHits()); - assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); - // call haversin - rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get(); - assertSearchResponse(rsp); - assertEquals(1, rsp.getHits().getTotalHits()); - assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); - } + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location").field("type", "geo_point"); + xContentBuilder.endObject().endObject().endObject().endObject(); + assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); + ensureGreen(); + client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() + .field("name", "test") + .startObject("location").field("lat", 61.5240).field("lon", 105.3188).endObject() + .endObject()).execute().actionGet(); + refresh(); + // access .lat + SearchResponse rsp = buildRequest("doc['location'].lat").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // access .lon + rsp = buildRequest("doc['location'].lon").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // access .empty + rsp = buildRequest("doc['location'].empty ? 
1 : 0").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + // call haversin + rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get(); + assertSearchResponse(rsp); + assertEquals(1, rsp.getHits().getTotalHits()); + assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); + } + + public void testBoolean() throws Exception { + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("vip").field("type", "boolean"); + xContentBuilder.endObject().endObject().endObject().endObject(); + assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); + ensureGreen(); + indexRandom(true, + client().prepareIndex("test", "doc", "1").setSource("price", 1.0, "vip", true), + client().prepareIndex("test", "doc", "2").setSource("price", 2.0, "vip", false), + client().prepareIndex("test", "doc", "3").setSource("price", 2.0, "vip", false)); + // access .value + SearchResponse rsp = buildRequest("doc['vip'].value").get(); + assertSearchResponse(rsp); + assertEquals(3, rsp.getHits().getTotalHits()); + assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); + assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); + // access .empty + rsp = buildRequest("doc['vip'].empty ? 1 : 0").get(); + assertSearchResponse(rsp); + assertEquals(3, rsp.getHits().getTotalHits()); + assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); + assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); + // ternary operator + // vip's have a 50% discount + rsp = buildRequest("doc['vip'] ? doc['price']/2 : doc['price']").get(); + assertSearchResponse(rsp); + assertEquals(3, rsp.getHits().getTotalHits()); + assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); + assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); + assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); + } } From 0a6f40c7f5f3af4e04a8875216e059025e1bbdf3 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 3 May 2016 08:53:15 +0200 Subject: [PATCH 0045/1311] Enable HTTP compression by default with compression level 3 With this commit we compress HTTP responses provided the client supports it (as indicated by the HTTP header 'Accept-Encoding'). We're also able to process compressed HTTP requests if needed. The default compression level is lowered from 6 to 3 as benchmarks have indicated that this reduces query latency with a negligible increase in network traffic. 
Closes #7309 --- .../http/HttpTransportSettings.java | 6 +- .../http/netty/ESHttpContentDecompressor.java | 51 ------ .../http/netty/NettyHttpServerTransport.java | 9 +- .../http/netty/NettyHttpCompressionIT.java | 146 ++++++++++++++++++ docs/reference/migration/migrate_5_0.asciidoc | 3 + .../migration/migrate_5_0/http.asciidoc | 9 ++ docs/reference/modules/http.asciidoc | 6 +- .../elasticsearch/test/ESIntegTestCase.java | 8 +- 8 files changed, 177 insertions(+), 61 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/http/netty/ESHttpContentDecompressor.java create mode 100644 core/src/test/java/org/elasticsearch/http/netty/NettyHttpCompressionIT.java create mode 100644 docs/reference/migration/migrate_5_0/http.asciidoc diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 403c543041d..72f8f380df8 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -50,9 +50,11 @@ public final class HttpTransportSettings { public static final Setting SETTING_PIPELINING_MAX_EVENTS = Setting.intSetting("http.pipelining.max_events", 10000, Property.NodeScope); public static final Setting SETTING_HTTP_COMPRESSION = - Setting.boolSetting("http.compression", false, Property.NodeScope); + Setting.boolSetting("http.compression", true, Property.NodeScope); + // we intentionally use a different compression level as Netty here as our benchmarks have shown that a compression level of 3 is the + // best compromise between reduction in network traffic and added latency. For more details please check #7309. public static final Setting SETTING_HTTP_COMPRESSION_LEVEL = - Setting.intSetting("http.compression_level", 6, Property.NodeScope); + Setting.intSetting("http.compression_level", 3, Property.NodeScope); public static final Setting> SETTING_HTTP_HOST = listSetting("http.host", emptyList(), Function.identity(), Property.NodeScope); public static final Setting> SETTING_HTTP_PUBLISH_HOST = diff --git a/core/src/main/java/org/elasticsearch/http/netty/ESHttpContentDecompressor.java b/core/src/main/java/org/elasticsearch/http/netty/ESHttpContentDecompressor.java deleted file mode 100644 index 0d1e46cf06b..00000000000 --- a/core/src/main/java/org/elasticsearch/http/netty/ESHttpContentDecompressor.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.http.netty; - -import org.elasticsearch.transport.TransportException; -import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.handler.codec.embedder.DecoderEmbedder; -import org.jboss.netty.handler.codec.http.HttpContentDecompressor; -import org.jboss.netty.handler.codec.http.HttpHeaders; - -public class ESHttpContentDecompressor extends HttpContentDecompressor { - private final boolean compression; - - public ESHttpContentDecompressor(boolean compression) { - super(); - this.compression = compression; - } - - @Override - protected DecoderEmbedder newContentDecoder(String contentEncoding) throws Exception { - if (compression) { - // compression is enabled so handle the request according to the headers (compressed and uncompressed) - return super.newContentDecoder(contentEncoding); - } else { - // if compression is disabled only allow "identity" (uncompressed) requests - if (HttpHeaders.Values.IDENTITY.equals(contentEncoding)) { - // nothing to handle here - return null; - } else { - throw new TransportException("Support for compressed content is disabled. You can enable it with http.compression=true"); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index e90a3710889..22852b7c0ff 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -70,6 +70,7 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory; import org.jboss.netty.handler.codec.http.HttpChunkAggregator; import org.jboss.netty.handler.codec.http.HttpContentCompressor; +import org.jboss.netty.handler.codec.http.HttpContentDecompressor; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequestDecoder; import org.jboss.netty.handler.timeout.ReadTimeoutException; @@ -544,19 +545,19 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent 1) { + throw new AssertionError("Expected none or one content encoding header but got " + headers.length + " headers."); + } + } + + public boolean hasContentEncodingHeader() { + return contentEncodingHeader != null; + } + + public Header getContentEncodingHeader() { + return contentEncodingHeader; + } + } + + +} diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 5f2d30e290e..e2dae648485 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -36,6 +36,7 @@ way to do this is to upgrade to Elasticsearch 2.3 or later and to use the * <> * <> * <> +* <> * <> * <> * <> @@ -60,6 +61,8 @@ include::migrate_5_0/settings.asciidoc[] include::migrate_5_0/allocation.asciidoc[] +include::migrate_5_0/http.asciidoc[] + include::migrate_5_0/rest.asciidoc[] include::migrate_5_0/cat.asciidoc[] diff --git a/docs/reference/migration/migrate_5_0/http.asciidoc b/docs/reference/migration/migrate_5_0/http.asciidoc new file mode 100644 index 00000000000..68a3f2841f0 --- /dev/null +++ b/docs/reference/migration/migrate_5_0/http.asciidoc @@ -0,0 +1,9 @@ +[[breaking_50_http_changes]] +=== HTTP changes + +==== Compressed HTTP requests are always accepted + +Before 5.0, Elasticsearch accepted compressed HTTP requests only if the setting + 
`http.compression` was set to `true`. Elasticsearch accepts compressed requests
+ now but will continue to send compressed responses only if `http.compression`
+ is set to `true`.
\ No newline at end of file
diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc
index cf3780e53fd..24f11f0c378 100644
--- a/docs/reference/modules/http.asciidoc
+++ b/docs/reference/modules/http.asciidoc
@@ -48,10 +48,10 @@ to `4kb`

 |`http.compression` |Support for compression when possible (with
-Accept-Encoding). Defaults to `false`.
+Accept-Encoding). Defaults to `true`.

-|`http.compression_level` |Defines the compression level to use.
-Defaults to `6`.
+|`http.compression_level` |Defines the compression level to use for HTTP responses. Valid values are in the range of 1 (minimum compression)
+and 9 (maximum compression). Defaults to `3`.

 |`http.cors.enabled` |Enable or disable cross-origin resource
 sharing, i.e. whether a browser on another origin can do requests to
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index d347f001e79..a31ef76272e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -23,6 +23,7 @@ import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -2040,15 +2041,20 @@ public abstract class ESIntegTestCase extends ESTestCase {
     }

     protected HttpRequestBuilder httpClient() {
+        return httpClient(HttpClients.createDefault());
+    }
+
+    protected HttpRequestBuilder httpClient(CloseableHttpClient httpClient) {
         final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get();
         final NodeInfo[] nodes = nodeInfos.getNodes();
         assertTrue(nodes.length > 0);
         TransportAddress publishAddress = randomFrom(nodes).getHttp().address().publishAddress();
         assertEquals(1, publishAddress.uniqueAddressTypeId());
         InetSocketAddress address = ((InetSocketTransportAddress) publishAddress).address();
-        return new HttpRequestBuilder(HttpClients.createDefault()).host(NetworkAddress.format(address.getAddress())).port(address.getPort());
+        return new HttpRequestBuilder(httpClient).host(NetworkAddress.format(address.getAddress())).port(address.getPort());
     }

+
     /**
      * This method is executed iff the test is annotated with {@link SuiteScopeTestCase}
      * before the first test of this class is executed.

From 98dba262cf3e81d405c8a372983942d59f993557 Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Tue, 3 May 2016 11:31:56 +0200
Subject: [PATCH 0046/1311] Use object equality to compare versions in IndexSettings (#18103)

Fixes an issue where updating index metadata on an index from a version
that it does not have in its static list of known versions fails.
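The corner case is easiest to see with version ids outside the static list of
known versions; the new test below uses Version.fromId(999999) for exactly this
reason. A minimal sketch, assuming (as that test implies) that Version.fromId
builds a fresh Version instance for unknown ids while known ids map to shared
constants:

[source,java]
----
import org.elasticsearch.Version;

// Two lookups of the same unknown id yield distinct instances, so the old
// reference comparison ('!=') reported a mismatch even though the versions
// are logically equal; the new equals()-based check handles both cases.
Version a = Version.fromId(999999);
Version b = Version.fromId(999999);
assert a != b;        // distinct objects
assert a.equals(b);   // same version id
----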
--- .../main/java/org/elasticsearch/index/IndexSettings.java | 2 +- .../java/org/elasticsearch/index/IndexSettingsTests.java | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 31e10e4d7ec..7c8cb4ff8c8 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -382,7 +382,7 @@ public final class IndexSettings { */ synchronized boolean updateIndexMetaData(IndexMetaData indexMetaData) { final Settings newSettings = indexMetaData.getSettings(); - if (Version.indexCreated(newSettings) != version) { + if (version.equals(Version.indexCreated(newSettings)) == false) { throw new IllegalArgumentException("version mismatch on settings update expected: " + version + " but was: " + Version.indexCreated(newSettings)); } final String newUUID = newSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 07962153d13..9c5040589ae 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -110,6 +110,15 @@ public class IndexSettingsTests extends ESTestCase { assertTrue(ex.getMessage(), ex.getMessage().startsWith("version mismatch on settings update expected: ")); } + // use version number that is unknown + metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromId(999999)) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(Version.fromId(999999), settings.getIndexVersionCreated()); + assertEquals("_na_", settings.getUUID()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, + Version.fromId(999999)).put("index.test.setting.int", 42).build())); + metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build()); settings = new IndexSettings(metaData, Settings.EMPTY); From 1385e25542c78f9122f43727743e54a3e2a545a3 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 3 May 2016 09:20:19 -0400 Subject: [PATCH 0047/1311] Add note on configuring assertions in IDEs This commit adds a note to the contributing docs on how to configure assertions inside Eclipse and IntelliJ. Relates #18107 --- CONTRIBUTING.md | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9afcd34fad7..9f7264acdae 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -76,7 +76,31 @@ Contributing to the Elasticsearch codebase **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch) -Make sure you have [Gradle](http://gradle.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE: `gradle eclipse` then `File: Import: Existing Projects into Workspace`. Select the option `Search for nested projects`. Additionally you will want to ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini` accordingly to avoid GC overhead errors. 
+Make sure you have [Gradle](http://gradle.org) installed, as
+Elasticsearch uses it as its build system.
+
+Eclipse users can automatically configure their IDE: `gradle eclipse`
+then `File: Import: Existing Projects into Workspace`. Select the
+option `Search for nested projects`. Additionally you will want to
+ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini`
+accordingly to avoid GC overhead errors.
+
+IntelliJ users can automatically configure their IDE: `gradle idea`
+then `File->New Project From Existing Sources`. Point to the root of
+the source directory, select
+`Import project from external model->Gradle`, enable
+`Use auto-import`.
+
+The Elasticsearch codebase makes heavy use of Java `assert`s and the
+test runner requires that assertions be enabled within the JVM. This
+can be accomplished by passing the flag `-ea` to the JVM on startup.
+
+For IntelliJ, go to
+`Run->Edit Configurations...->Defaults->JUnit->VM options` and input
+`-ea`.
+
+For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to
+`VM Arguments`.

 Please follow these formatting guidelines:

From d01526b6aba778c8c232460ab2840a3e18ff972c Mon Sep 17 00:00:00 2001
From: Clinton Gormley
Date: Tue, 3 May 2016 15:35:33 +0200
Subject: [PATCH 0048/1311] Bumped docs version

---
 docs/reference/index.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc
index 9b99b56997a..8812a2bed95 100644
--- a/docs/reference/index.asciidoc
+++ b/docs/reference/index.asciidoc
@@ -1,7 +1,7 @@
 [[elasticsearch-reference]]
 = Elasticsearch Reference

-:version: 5.0.0-alpha1
+:version: 5.0.0-alpha2
 :major-version: 5.x
 :branch: master
 :jdk: 1.8.0_73

From 85d2fc0e38c873bdd4092fcf64c0aef3568cfc24 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Tue, 3 May 2016 10:05:44 -0400
Subject: [PATCH 0049/1311] Avoid string concatenation in IngestDocument.FieldPath

Today, the constructor for IngestDocument#FieldPath does a string
concatenation and two object allocations on every field path. This
commit removes these unnecessary operations.
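The pattern in isolation, as a sketch (FieldPathSketch is a hypothetical name;
the real change, shown in the diff below, lives in IngestDocument.FieldPath):

[source,java]
----
// Sketch: the prefix is concatenated once per class load instead of once per
// constructed path, and the magic length '8' becomes a derived constant.
final class FieldPathSketch {
    static final String INGEST_KEY = "_ingest";
    static final String INGEST_KEY_PREFIX = INGEST_KEY + ".";

    static String stripIngestPrefix(String path) {
        if (path.startsWith(INGEST_KEY_PREFIX)) {
            return path.substring(INGEST_KEY_PREFIX.length(), path.length());
        }
        return path;
    }
}
----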
Relates #18108 --- .../elasticsearch/ingest/core/IngestDocument.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java index 7b7199b657a..05682991713 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java @@ -51,6 +51,8 @@ import static java.nio.charset.StandardCharsets.UTF_8; public final class IngestDocument { public final static String INGEST_KEY = "_ingest"; + private static final String INGEST_KEY_PREFIX = INGEST_KEY + "."; + private static final String SOURCE_PREFIX = SourceFieldMapper.NAME + "."; static final String TIMESTAMP = "timestamp"; @@ -600,6 +602,7 @@ public final class IngestDocument { } private class FieldPath { + private final String[] pathElements; private final Object initialContext; @@ -608,13 +611,13 @@ public final class IngestDocument { throw new IllegalArgumentException("path cannot be null nor empty"); } String newPath; - if (path.startsWith(INGEST_KEY + ".")) { + if (path.startsWith(INGEST_KEY_PREFIX)) { initialContext = ingestMetadata; - newPath = path.substring(8, path.length()); + newPath = path.substring(INGEST_KEY_PREFIX.length(), path.length()); } else { initialContext = sourceAndMetadata; - if (path.startsWith(SourceFieldMapper.NAME + ".")) { - newPath = path.substring(8, path.length()); + if (path.startsWith(SOURCE_PREFIX)) { + newPath = path.substring(SOURCE_PREFIX.length(), path.length()); } else { newPath = path; } @@ -624,5 +627,6 @@ public final class IngestDocument { throw new IllegalArgumentException("path [" + path + "] is not valid"); } } + } } From 3ca02d647eafcd875802109fcbd009e80d37e5bb Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Tue, 3 May 2016 11:37:34 -0400 Subject: [PATCH 0050/1311] Removing old documentation mentioning NodeBuilder. --- docs/groovy-api/client.asciidoc | 59 --------------------------------- 1 file changed, 59 deletions(-) delete mode 100644 docs/groovy-api/client.asciidoc diff --git a/docs/groovy-api/client.asciidoc b/docs/groovy-api/client.asciidoc deleted file mode 100644 index c0a6d688415..00000000000 --- a/docs/groovy-api/client.asciidoc +++ /dev/null @@ -1,59 +0,0 @@ -[[client]] -== Client - -Obtaining an elasticsearch Groovy `GClient` (a `GClient` is a simple -wrapper on top of the Java `Client`) is simple. The most common way to -get a client is by starting an embedded `Node` which acts as a node -within the cluster. - - -[[node-client]] -=== Node Client - -A Node based client is the simplest form to get a `GClient` to start -executing operations against elasticsearch. 
- -[source,js] --------------------------------------------------- -import org.elasticsearch.groovy.client.GClient -import org.elasticsearch.groovy.node.GNode -import static org.elasticsearch.groovy.node.GNodeBuilder.nodeBuilder - -// on startup - -GNode node = nodeBuilder().node(); -GClient client = node.client(); - -// on shutdown - -node.close(); --------------------------------------------------- - -Since elasticsearch allows to configure it using JSON based settings, -the configuration itself can be done using a closure that represent the -JSON: - -[source,js] --------------------------------------------------- -import org.elasticsearch.groovy.node.GNode -import org.elasticsearch.groovy.node.GNodeBuilder -import static org.elasticsearch.groovy.node.GNodeBuilder.* - -// on startup - -GNodeBuilder nodeBuilder = nodeBuilder(); -nodeBuilder.settings { - node { - client = true - } - cluster { - name = "test" - } -} - -GNode node = nodeBuilder.node() - -// on shutdown - -node.stop().close() --------------------------------------------------- From 5648253d452e2b2d07ee04574c2521440ff7d973 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 3 May 2016 09:51:15 -0600 Subject: [PATCH 0051/1311] Add documentation for shard store output in allocation explain API Relates to #17689 --- .../cluster/allocation-explain.asciidoc | 54 ++++++++++++++----- 1 file changed, 42 insertions(+), 12 deletions(-) diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index dcca80dd61f..7a8a3e4a8ce 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -34,15 +34,21 @@ The response looks like: "reason" : "INDEX_CREATED", <2> "at" : "2016-03-22T20:04:23.620Z" }, - "nodes" : { <3> + "allocation_delay_ms" : 0, <3> + "remaining_delay_ms" : 0, <4> + "nodes" : { "V-Spi0AyRZ6ZvKbaI3691w" : { "node_name" : "node1", - "node_attributes" : { <4> + "node_attributes" : { <5> "bar" : "baz" }, - "final_decision" : "NO", <5> - "weight" : 0.06666675, <6> - "decisions" : [ { <7> + "store" : { + "shard_copy" : "NONE" <6> + }, + "final_decision" : "NO", <7> + "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", + "weight" : 0.06666675, <8> + "decisions" : [ { <9> "decider" : "filter", "decision" : "NO", "explanation" : "node does not match index include filters [foo:\"bar\"]" @@ -54,7 +60,11 @@ The response looks like: "bar" : "baz", "foo" : "bar" }, + "store" : { + "shard_copy" : "AVAILABLE" + }, "final_decision" : "NO", + "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", "weight" : -1.3833332, "decisions" : [ { "decider" : "same_shard", @@ -65,7 +75,11 @@ The response looks like: "PzdyMZGXQdGhqTJHF_hGgA" : { "node_name" : "node3", "node_attributes" : { }, + "store" : { + "shard_copy" : "NONE" + }, "final_decision" : "NO", + "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", "weight" : 2.3166666, "decisions" : [ { "decider" : "filter", @@ -78,11 +92,13 @@ The response looks like: -------------------------------------------------- <1> Whether the shard is assigned or unassigned <2> Reason for the shard originally becoming unassigned -<3> List of node decisions about the shard -<4> User-added attributes the node has -<5> Final decision for whether the shard is allowed to be allocated to this node -<6> Weight for 
how much the allocator would like to allocate the shard to this node -<7> List of decisions factoring into final decision +<3> Configured delay before the shard can be allocated +<4> Remaining delay before the shard can be allocated +<5> User-added attributes the node has +<6> The shard copy information for this node and error (if applicable) +<7> Final decision and explanation of whether the shard can be allocated to this node +<8> Weight for how much the allocator would like to allocate the shard to this node +<9> List of node decisions factoring into final decision about the shard For a shard that is already assigned, the output looks similar to: @@ -97,13 +113,19 @@ For a shard that is already assigned, the output looks similar to: }, "assigned" : true, "assigned_node_id" : "Qc6VL8c5RWaw1qXZ0Rg57g", <1> + "allocation_delay_ms" : 0, + "remaining_delay_ms" : 0, "nodes" : { "V-Spi0AyRZ6ZvKbaI3691w" : { "node_name" : "Susan Storm", "node_attributes" : { "bar" : "baz" }, + "store" : { + "shard_copy" : "NONE" + }, "final_decision" : "NO", + "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", "weight" : 1.4499999, "decisions" : [ { "decider" : "filter", @@ -117,7 +139,11 @@ For a shard that is already assigned, the output looks similar to: "bar" : "baz", "foo" : "bar" }, - "final_decision" : "CURRENTLY_ASSIGNED", <2> + "store" : { + "shard_copy" : "AVAILABLE" + }, + "final_decision" : "ALREADY_ASSIGNED", <2> + "final_explanation" : "the shard is already assigned to this node", "weight" : 0.0, "decisions" : [ { "decider" : "same_shard", @@ -128,7 +154,11 @@ For a shard that is already assigned, the output looks similar to: "PzdyMZGXQdGhqTJHF_hGgA" : { "node_name" : "The Symbiote", "node_attributes" : { }, + "store" : { + "shard_copy" : "NONE" + }, "final_decision" : "NO", + "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", "weight" : 3.6999998, "decisions" : [ { "decider" : "filter", @@ -140,7 +170,7 @@ For a shard that is already assigned, the output looks similar to: } -------------------------------------------------- <1> Node the shard is currently assigned to -<2> The decision is "CURRENTLY_ASSIGNED" because the shard is currently assigned to this node +<2> The decision is "ALREADY_ASSIGNED" because the shard is currently assigned to this node You can also have Elasticsearch explain the allocation of the first unassigned shard it finds by sending an empty body, such as: From 21e94e6054354f59fd1bdca400bb8a2229aecbea Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Tue, 3 May 2016 12:03:00 -0400 Subject: [PATCH 0052/1311] Revert "Removing old documentation mentioning NodeBuilder." This reverts commit 3ca02d647eafcd875802109fcbd009e80d37e5bb. --- docs/groovy-api/client.asciidoc | 59 +++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 docs/groovy-api/client.asciidoc diff --git a/docs/groovy-api/client.asciidoc b/docs/groovy-api/client.asciidoc new file mode 100644 index 00000000000..c0a6d688415 --- /dev/null +++ b/docs/groovy-api/client.asciidoc @@ -0,0 +1,59 @@ +[[client]] +== Client + +Obtaining an elasticsearch Groovy `GClient` (a `GClient` is a simple +wrapper on top of the Java `Client`) is simple. The most common way to +get a client is by starting an embedded `Node` which acts as a node +within the cluster. 
+ + +[[node-client]] +=== Node Client + +A Node based client is the simplest form to get a `GClient` to start +executing operations against elasticsearch. + +[source,js] +-------------------------------------------------- +import org.elasticsearch.groovy.client.GClient +import org.elasticsearch.groovy.node.GNode +import static org.elasticsearch.groovy.node.GNodeBuilder.nodeBuilder + +// on startup + +GNode node = nodeBuilder().node(); +GClient client = node.client(); + +// on shutdown + +node.close(); +-------------------------------------------------- + +Since elasticsearch allows to configure it using JSON based settings, +the configuration itself can be done using a closure that represent the +JSON: + +[source,js] +-------------------------------------------------- +import org.elasticsearch.groovy.node.GNode +import org.elasticsearch.groovy.node.GNodeBuilder +import static org.elasticsearch.groovy.node.GNodeBuilder.* + +// on startup + +GNodeBuilder nodeBuilder = nodeBuilder(); +nodeBuilder.settings { + node { + client = true + } + cluster { + name = "test" + } +} + +GNode node = nodeBuilder.node() + +// on shutdown + +node.stop().close() +-------------------------------------------------- From ca21aa0cb592decaf8a5f69b49dd778081f8456e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 May 2016 17:17:43 +0200 Subject: [PATCH 0053/1311] Make reset() in QueryShardContext private The query shard reset() method resets some internal state in the query shard context, like clearing query names, the filter flag or named queries. The problem with this method being public is that it is currently (mis?)used for modifying an existing context for recursive invocation, but the contexts that have been reset that way cannot be properly set back to their previous state. This PR is a step towards removing reset() entirely by first making it only be used internally in QueryShardContext. In places where reset() was used we can either create new QueryShardContexts or modify the existing context because it is discarded afterwards anyway. 
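For illustration, here is a minimal, self-contained sketch of the replacement pattern (generic names, not the actual Elasticsearch classes): instead of mutating and reset()-ing one shared context, callers obtain a fresh instance per use and simply discard it afterwards.

    import java.util.function.Supplier;

    public class FreshContextPattern {
        // Stand-in for QueryShardContext: holds per-use state that previously
        // had to be cleared via reset().
        static final class Context {
            boolean isFilter;
        }

        public static void main(String[] args) {
            Supplier<Context> contextFactory = Context::new;

            Context first = contextFactory.get();
            first.isFilter = true; // mutate freely, then discard

            // The next consumer starts from a clean state, no reset() needed:
            Context second = contextFactory.get();
            System.out.println(second.isFilter); // prints: false
        }
    }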
--- .../cluster/metadata/AliasValidator.java | 14 ++++---------- .../percolator/PercolatorFieldMapper.java | 7 +------ .../index/query/AbstractQueryBuilder.java | 6 +++--- .../index/query/QueryShardContext.java | 9 ++++++--- .../messy/tests/TemplateQueryParserTests.java | 18 ++++++++++-------- 5 files changed, 24 insertions(+), 30 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index a3d776a4edb..64da04e6511 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -139,15 +139,9 @@ public class AliasValidator extends AbstractComponent { } } - private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException { - try { - queryShardContext.reset(); - QueryParseContext queryParseContext = queryShardContext.newParseContext(parser); - QueryBuilder queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext); - queryBuilder.toFilter(queryShardContext); - } finally { - queryShardContext.reset(); - parser.close(); - } + private static void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException { + QueryParseContext queryParseContext = queryShardContext.newParseContext(parser); + QueryBuilder queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext); + queryBuilder.toFilter(queryShardContext); } } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 0a993c63a35..6637dd8b762 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -207,7 +207,6 @@ public class PercolatorFieldMapper extends FieldMapper { } static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException { - context.reset(); // This means that fields in the query need to exist in the mapping prior to registering this query // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. // @@ -222,11 +221,7 @@ public class PercolatorFieldMapper extends FieldMapper { // as an analyzed string. 
context.setAllowUnmappedFields(false); context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); - try { - return queryBuilder.toQuery(context); - } finally { - context.reset(); - } + return queryBuilder.toQuery(context); } static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentLocation location) { diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 635daab4142..ab04c1aff44 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -109,12 +109,12 @@ public abstract class AbstractQueryBuilder> @Override public final Query toFilter(QueryShardContext context) throws IOException { Query result = null; - final boolean originalIsFilter = context.isFilter; + final boolean originalIsFilter = context.isFilter(); try { - context.isFilter = true; + context.setIsFilter(true); result = toQuery(context); } finally { - context.isFilter = originalIsFilter; + context.setIsFilter(originalIsFilter); } return result; } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 2c0b90f059c..af87a986243 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -91,7 +91,7 @@ public class QueryShardContext extends QueryRewriteContext { private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; - boolean isFilter; // pkg private for testing + private boolean isFilter; public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService, @@ -116,7 +116,7 @@ public class QueryShardContext extends QueryRewriteContext { this.types = source.getTypes(); } - public void reset() { + private void reset() { allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); this.lookup = null; this.namedQueries.clear(); @@ -183,6 +183,10 @@ public class QueryShardContext extends QueryRewriteContext { return isFilter; } + void setIsFilter(boolean isFilter) { + this.isFilter = isFilter; + } + public Collection simpleMatchToIndexNames(String pattern) { return mapperService.simpleMatchToIndexNames(pattern); } @@ -369,5 +373,4 @@ public class QueryShardContext extends QueryRewriteContext { public final Index index() { return indexSettings.getIndex(); } - } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 0815e27ce76..73a63088bf6 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -74,6 +74,7 @@ import org.junit.Before; import java.io.IOException; import java.lang.reflect.Proxy; import java.util.Collections; +import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; @@ -84,7 +85,7 @@ import static org.hamcrest.Matchers.containsString; public class TemplateQueryParserTests extends ESTestCase { private 
Injector injector; - private QueryShardContext context; + private Supplier contextFactory; @Before public void setup() throws IOException { @@ -134,7 +135,8 @@ public class TemplateQueryParserTests extends ESTestCase { ScriptService scriptService = injector.getInstance(ScriptService.class); SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); - MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> context); + MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> + contextFactory.get()); IndicesFieldDataCache cache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {}); IndexFieldDataService indexFieldDataService =new IndexFieldDataService(idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService); BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() { @@ -149,7 +151,7 @@ public class TemplateQueryParserTests extends ESTestCase { } }); IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - context = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, + contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, proxy, null, null, null); } @@ -164,7 +166,7 @@ public class TemplateQueryParserTests extends ESTestCase { String templateString = "{" + "\"query\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - context.reset(); + QueryShardContext context = contextFactory.get(); templateSourceParser.nextToken(); Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)), @@ -176,7 +178,7 @@ public class TemplateQueryParserTests extends ESTestCase { String templateString = "{" + " \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + " \"params\":{" + " \"template\":\"all\"," + " \"use_it\": true" + " }" + "}"; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - context.reset(); + QueryShardContext context = contextFactory.get(); Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)), context).toQuery(context); assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); @@ -192,7 +194,7 @@ public class TemplateQueryParserTests extends ESTestCase { + " \"params\":{" + " \"size\":2" + " }\n" + "}"; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - context.reset(); + QueryShardContext context = contextFactory.get(); try { TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).rewrite(context); @@ -206,7 +208,7 @@ public class TemplateQueryParserTests extends ESTestCase { String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } "; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - context.reset(); + QueryShardContext 
context = contextFactory.get(); templateSourceParser.nextToken(); @@ -219,7 +221,7 @@ public class TemplateQueryParserTests extends ESTestCase { String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } "; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); - context.reset(); + QueryShardContext context = contextFactory.get(); templateSourceParser.nextToken(); try { TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).toQuery(context); From 929187c71b6125d4eeb014bb6e7bea1c0ddf5368 Mon Sep 17 00:00:00 2001 From: Kelsey DeJesus-Banos Date: Tue, 3 May 2016 13:29:24 -0400 Subject: [PATCH 0054/1311] Update function-score-query.asciidoc (#18115) --- docs/reference/query-dsl/function-score-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 1e736efa11d..402eeb44550 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -148,7 +148,7 @@ you wish to inhibit this, set `"boost_mode": "replace"` The `weight` score allows you to multiply the score by the provided `weight`. This can sometimes be desired since boost value set on specific queries gets normalized, while for this score function it does -not. +not. The number value is of type float. [source,js] -------------------------------------------------- From d63362ce43cbe40ff5490d67bc5b6d6151508dcd Mon Sep 17 00:00:00 2001 From: Jihun No Date: Wed, 4 May 2016 14:07:28 +0900 Subject: [PATCH 0055/1311] Update client.asciidoc as of https://github.com/elastic/elasticsearch/commit/42526ac28e07da0055faafca1de6f8c5ec96cd85 5.0.0 alpha2 has no settingsBuilder() method. --- docs/java-api/client.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/java-api/client.asciidoc b/docs/java-api/client.asciidoc index 75a58f3c6b5..c71bd5f6ece 100644 --- a/docs/java-api/client.asciidoc +++ b/docs/java-api/client.asciidoc @@ -51,7 +51,7 @@ Note that you have to set the cluster name if you use one different than [source,java] -------------------------------------------------- -Settings settings = Settings.settingsBuilder() +Settings settings = Settings.builder() .put("cluster.name", "myClusterName").build(); Client client = TransportClient.builder().settings(settings).build(); //Add transport addresses and do something with the client... From 80dbe31d59c6cfdc41b5a3c3971d91699ab42f7e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 4 May 2016 08:30:28 +0200 Subject: [PATCH 0056/1311] Add note about using ipv6 addresses in `query_string`. --- docs/reference/mapping/types/ip.asciidoc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index 9786ebf4fd8..624bf63660f 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -108,3 +108,20 @@ GET my_index/_search } } -------------------------------------------------- + +Also beware that colons are special characters to the +<> query, so ipv6 addresses will +need to be escaped. 
The easiest way to do so is to put quotes around the +searched value: + +[source,js] +-------------------------------------------------- +GET t/_search +{ + "query": { + "query_string" : { + "query": "ip_addr:\"2001:db8::/48\"" + } + } +} +-------------------------------------------------- From 51a53c55cb2b30a3db85cd3342b3e3a4e5520903 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 4 May 2016 08:51:37 +0200 Subject: [PATCH 0057/1311] Update store documentation after #17616. --- docs/reference/index-modules/store.asciidoc | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index 79712fbc266..54080e365c2 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -8,9 +8,8 @@ The store module allows you to control how index data is stored and accessed on === File system storage types There are different file system implementations or _storage types_. The best -one for the operating environment will be automatically chosen: `mmapfs` on -Windows 64bit, `simplefs` on Windows 32bit, and `default` (hybrid `niofs` and -`mmapfs`) for the rest. +one for the operating environment will be automatically chosen: `simplefs` on +Windows 32bit, `niofs` on other 32bit systems and `mmapfs` on 64bit systems. This can be overridden for all indices by adding this to the `config/elasticsearch.yml` file: @@ -61,12 +60,13 @@ process equal to the size of the file being mapped. Before using this class, be sure you have allowed plenty of <>. -[[default_fs]]`default_fs`:: +[[default_fs]]`default_fs` deprecated[5.0.0, The `default_fs` store type is deprecated - use `mmapfs` instead]:: The `default` type is a hybrid of NIO FS and MMapFS, which chooses the best -file system for each type of file. Currently only the Lucene term dictionary -and doc values files are memory mapped to reduce the impact on the operating -system. All other files are opened using Lucene `NIOFSDirectory`. Address -space settings (<>) might also apply if your term -dictionaries are large. +file system for each type of file. Currently only the Lucene term dictionary, +doc values and points files are memory mapped to reduce the impact on the +operating system. All other files are opened using Lucene `NIOFSDirectory`. +Address space settings (<>) might also apply if your term +dictionaries are large, if you index many fields that use points (numerics, dates +and ip addresses) or if you have many fields with doc values. From ad8bf53bbdfd1d90e52cfd3c4c2e04b7525d0f23 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 4 May 2016 11:25:23 +0200 Subject: [PATCH 0058/1311] Fold helper class into abstract sort test class. Folds the helper class for random object generation into the abstract sort test class. Removes a few references to ESTestCase that were not needed due to inheriting from it along the way. 
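As a minimal sketch of the resulting pattern (class and method names here are generic stand-ins, not the actual test classes): the random-object factory now lives on the abstract base class, so subclasses inherit it and the standalone helper class can be deleted.

    import java.util.Random;

    abstract class AbstractBuilderTestCase {
        private static final Random RANDOM = new Random();

        // Formerly a public static method on a separate helper class.
        protected static String randomFilterName() {
            switch (RANDOM.nextInt(3)) {
                case 0: return "match_all";
                case 1: return "ids";
                default: return "term";
            }
        }
    }

    public class FieldSortTests extends AbstractBuilderTestCase {
        public static void main(String[] args) {
            // Inherited directly, no helper-class import required.
            System.out.println(randomFilterName());
        }
    }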
--- .../search/sort/AbstractSortTestCase.java | 16 +++++++ .../search/sort/FieldSortBuilderTests.java | 30 ++++++------- .../sort/GeoDistanceSortBuilderTests.java | 4 +- .../sort/NestedQueryBuilderGenerator.java | 44 ------------------- .../search/sort/ScriptSortBuilderTests.java | 5 +-- 5 files changed, 34 insertions(+), 65 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index a8562493c7a..76d5eba12f9 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -45,8 +45,12 @@ import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper.DoubleFieldType; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper.Nested; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -252,6 +256,18 @@ public abstract class AbstractSortTestCase> extends EST return doubleFieldType; } + protected static QueryBuilder randomNestedFilter() { + int id = randomIntBetween(0, 2); + switch(id) { + case 0: return (new MatchAllQueryBuilder()).boost(randomFloat()); + case 1: return (new IdsQueryBuilder()).boost(randomFloat()); + case 2: return (new TermQueryBuilder( + randomAsciiOfLengthBetween(1, 10), + randomDouble()).boost(randomFloat())); + default: throw new IllegalStateException("Only three query builders supported for testing sort"); + } + } + @SuppressWarnings("unchecked") private T copyItem(T original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index ebd5403489b..4b6eb82304a 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -25,12 +25,10 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.List; -import java.util.function.Supplier; public class FieldSortBuilderTests extends AbstractSortTestCase { @@ -42,8 +40,8 @@ public class FieldSortBuilderTests extends AbstractSortTestCase missingContent = Arrays.asList( "_last", "_first", - ESTestCase.randomAsciiOfLength(10), ESTestCase.randomUnicodeOfCodepointLengthBetween(5, 15), - ESTestCase.randomInt()); + randomAsciiOfLength(10), randomUnicodeOfCodepointLengthBetween(5, 15), + randomInt()); public FieldSortBuilder randomFieldSortBuilder() { @@ 
-58,7 +56,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); + () -> randomAsciiOfLengthBetween(1, 10))); break; case 1: - mutated.setNestedFilter(ESTestCase.randomValueOtherThan( + mutated.setNestedFilter(randomValueOtherThan( original.getNestedFilter(), - () -> NestedQueryBuilderGenerator.randomNestedFilter())); + () -> randomNestedFilter())); break; case 2: - mutated.sortMode(ESTestCase.randomValueOtherThan(original.sortMode(), () -> randomFrom(SortMode.values()))); + mutated.sortMode(randomValueOtherThan(original.sortMode(), () -> randomFrom(SortMode.values()))); break; case 3: - mutated.unmappedType(ESTestCase.randomValueOtherThan( + mutated.unmappedType(randomValueOtherThan( original.unmappedType(), - () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); + () -> randomAsciiOfLengthBetween(1, 10))); break; case 4: - mutated.missing(ESTestCase.randomValueOtherThan(original.missing(), () -> randomFrom(missingContent))); + mutated.missing(randomValueOtherThan(original.missing(), () -> randomFrom(missingContent))); break; case 5: - mutated.order(ESTestCase.randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); + mutated.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); break; default: throw new IllegalStateException("Unsupported mutation."); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 3da59f79de6..817427f9ecc 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -89,7 +89,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase NestedQueryBuilderGenerator.randomNestedFilter())); + () -> randomNestedFilter())); break; case 7: result.setNestedPath(ESTestCase.randomValueOtherThan( diff --git a/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java b/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java deleted file mode 100644 index de3318036e8..00000000000 --- a/core/src/test/java/org/elasticsearch/search/sort/NestedQueryBuilderGenerator.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.sort; - -import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.List; - -public class NestedQueryBuilderGenerator { - private NestedQueryBuilderGenerator() { - // this is a helper class only, doesn't need a constructor - } - - private static List> builders = Arrays.asList( - new MatchAllQueryBuilder(), - new IdsQueryBuilder(), - new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomDouble())); - - public static QueryBuilder randomNestedFilter() { - return ESTestCase.randomFrom(builders).boost(ESTestCase.randomFloat()); - } -} diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index b809adcb27b..38a20a129aa 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; @@ -64,7 +63,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase NestedQueryBuilderGenerator.randomNestedFilter())); + () -> randomNestedFilter())); break; case 3: result.setNestedPath(original.getNestedPath() + "_some_suffix"); From 9a90bdce01592927c61956c3324cab69722abbdb Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 4 May 2016 12:04:28 +0200 Subject: [PATCH 0059/1311] Add deprecation warnings for ignore_malformed to reference docs --- docs/reference/query-dsl/geo-bounding-box-query.asciidoc | 3 +++ docs/reference/query-dsl/geo-distance-query.asciidoc | 5 +++++ docs/reference/query-dsl/geo-polygon-query.asciidoc | 3 +++ 3 files changed, 11 insertions(+) diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index 2630e6cf048..00120a98fc9 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -52,6 +52,9 @@ Then the following simple query can be executed with a |Option |Description |`_name` |Optional name field to identify the filter +|`ignore_malformed` |deprecated[5.0.0,Use `validation_method` instead] Set to `true` to +accept geo points with invalid latitude or longitude (default is `false`). + |`validation_method` |Set to `IGNORE_MALFORMED` to accept geo points with invalid latitude or longitude, set to `COERCE` to also try to infer correct latitude or longitude. (default is `STRICT`). 
diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index 2b80241dfc5..c29391e5b57 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -162,6 +162,11 @@ The following are options allowed on the filter: Optional name field to identify the query +`ignore_malformed`:: + + deprecated[5.0.0,Use `validation_method` instead] Set to `true` to accept geo points with invalid latitude or + longitude (default is `false`). + `validation_method`:: Set to `IGNORE_MALFORMED` to accept geo points with invalid latitude or diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index ef80de82079..5717c5dc924 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -34,6 +34,9 @@ points. Here is an example: |Option |Description |`_name` |Optional name field to identify the filter +|`ignore_malformed` |deprecated[5.0.0,Use `validation_method` instead] Set to `true` to accept geo points with invalid latitude or +longitude (default is `false`). + |`validation_method` |Set to `IGNORE_MALFORMED` to accept geo points with invalid latitude or longitude, `COERCE` to try and infer correct latitude or longitude, or `STRICT` (default is `STRICT`). From 6f15f35819f26763a5b3f427af6b6a68fff3cebe Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 4 May 2016 12:14:39 +0200 Subject: [PATCH 0060/1311] Remove left over references to ESTestCase --- .../search/sort/GeoDistanceSortBuilderTests.java | 13 ++++++------- .../search/sort/ScoreSortBuilderTests.java | 3 +-- .../search/sort/ScriptSortBuilderTests.java | 11 +++++------ 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 817427f9ecc..a41138e2093 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; @@ -93,9 +92,9 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase ESTestCase.randomAsciiOfLengthBetween(1, 10))); + () -> randomAsciiOfLengthBetween(1, 10))); } if (randomBoolean()) { result.coerce(! 
result.coerce()); @@ -171,20 +170,20 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(SortOrder.values()))); + result.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); break; case 5: result.sortMode(mode(original.sortMode())); break; case 6: - result.setNestedFilter(ESTestCase.randomValueOtherThan( + result.setNestedFilter(randomValueOtherThan( original.getNestedFilter(), () -> randomNestedFilter())); break; case 7: - result.setNestedPath(ESTestCase.randomValueOtherThan( + result.setNestedPath(randomValueOtherThan( result.getNestedPath(), - () -> ESTestCase.randomAsciiOfLengthBetween(1, 10))); + () -> randomAsciiOfLengthBetween(1, 10))); break; case 8: result.coerce(! original.coerce()); diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java index 222240a9ac4..605573e9b71 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScoreSortBuilderTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.test.ESTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; @@ -46,7 +45,7 @@ public class ScoreSortBuilderTests extends AbstractSortTestCase randomFrom(SortOrder.values()))); + result.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); return result; } diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 38a20a129aa..6491c36bf6d 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; -import org.elasticsearch.test.ESTestCase; import org.junit.Rule; import org.junit.rules.ExpectedException; @@ -53,20 +52,20 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); + builder.sortMode(randomValueOtherThan(builder.sortMode(), () -> randomFrom(SortMode.values()))); } else { Set exceptThis = new HashSet<>(); exceptThis.add(SortMode.SUM); exceptThis.add(SortMode.AVG); exceptThis.add(SortMode.MEDIAN); - builder.sortMode(ESTestCase.randomValueOtherThanMany(exceptThis::contains, () -> randomFrom(SortMode.values()))); + builder.sortMode(randomValueOtherThanMany(exceptThis::contains, () -> randomFrom(SortMode.values()))); } } if (randomBoolean()) { builder.setNestedFilter(randomNestedFilter()); } if (randomBoolean()) { - builder.setNestedPath(ESTestCase.randomAsciiOfLengthBetween(1, 10)); + builder.setNestedPath(randomAsciiOfLengthBetween(1, 10)); } return builder; } @@ -102,7 +101,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomFrom(SortMode.values()))); + result.sortMode(randomValueOtherThan(result.sortMode(), () -> randomFrom(SortMode.values()))); } else { // script sort type String only allows MIN and MAX, so we only switch if 
(original.sortMode() == SortMode.MIN) { @@ -113,7 +112,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomNestedFilter())); break; From 283a6d27f1aa59e38739b19a058ba798742f09ef Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 4 May 2016 13:34:11 +0200 Subject: [PATCH 0061/1311] Clean up merge commit changes --- .../search/sort/GeoDistanceSortBuilderTests.java | 15 +++++++-------- .../java/org/elasticsearch/test/ESTestCase.java | 11 ----------- 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 89379e6b428..3c94f69eba5 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; @@ -81,13 +80,13 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(DistanceUnit.values()))); + result.unit(randomValueOtherThan(result.unit(), () -> randomFrom(DistanceUnit.values()))); } if (randomBoolean()) { result.order(randomFrom(SortOrder.values())); } if (randomBoolean()) { - result.sortMode(ESTestCase.randomValueOtherThan(SortMode.SUM, () -> randomFrom(SortMode.values()))); + result.sortMode(randomValueOtherThan(SortMode.SUM, () -> randomFrom(SortMode.values()))); } if (randomBoolean()) { result.setNestedFilter(randomNestedFilter()); @@ -99,7 +98,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); } if (randomBoolean()) { - result.validation(ESTestCase.randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); + result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); } return result; @@ -150,14 +149,14 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomFrom(DistanceUnit.values()))); + result.unit(randomValueOtherThan(result.unit(), () -> randomFrom(DistanceUnit.values()))); break; case 4: result.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values()))); break; case 5: - result.sortMode(ESTestCase.randomValueOtherThanMany( - Arrays.asList(SortMode.SUM, result.sortMode()), + result.sortMode(randomValueOtherThanMany( + Arrays.asList(SortMode.SUM, result.sortMode())::contains, () -> randomFrom(SortMode.values()))); break; case 6: @@ -171,7 +170,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); break; case 8: - result.validation(ESTestCase.randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); + result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); break; } return result; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 7e33f494d70..01f0d5b8151 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -429,17 +429,6 @@ public abstract class ESTestCase extends LuceneTestCase { return randomValue; } - /** - * helper to get a random value in a certain range that's different from the input - */ - public static T randomValueOtherThanMany(Collection input, Supplier randomSupplier) { - T randomValue = null; - do { - randomValue = randomSupplier.get(); - } while (input.contains(randomValue)); - return randomValue; - } - /** * Runs the code block for 10 seconds waiting for no assertion to trip. */ From 2dea44994922dacec38f81cc68c42360bdc449d7 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 3 May 2016 09:12:28 -0400 Subject: [PATCH 0062/1311] Remove Strings#splitStringToArray This commit removes the method Strings#splitStringToArray and replaces the call sites with invocations to String#split. There are only two explanations for the existence of this method. The first is that String#split is slightly tricky in that it accepts a regular expression rather than a character to split on. This means that if s is a string, s.split(".") does not split on the character '.', but rather splits on the regular expression '.', which splits on every character (of course, this is easily fixed by invoking s.split("\\.") instead). The second possible explanation is that (again) String#split accepts a regular expression. This means that there could be a performance concern compared to just splitting on a single character. However, it turns out that String#split has a fast path for the case of splitting on a single character, and microbenchmarks show that String#split has 1.5x--2x the throughput of Strings#splitStringToArray. There is a slight behavior difference between Strings#splitStringToArray and String#split: namely, the former would return an empty array in cases where the input string was null or empty, but String#split will just NPE at the call site on null and return a one-element array containing the empty string when the input string is empty. There was only one place relying on this behavior, and the call site has been modified accordingly. 
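For reference, a small self-contained demonstration of the String#split behaviors described above (standard JDK only):

    public class SplitDemo {
        public static void main(String[] args) {
            // "." is a regular expression matching any character, so every
            // substring between matches is empty; because String#split drops
            // trailing empty strings, the result is an empty array:
            System.out.println("a.b.c".split(".").length);   // 0

            // Escaping the dot splits on the literal character:
            System.out.println("a.b.c".split("\\.").length); // 3

            // Unlike the removed helper, which returned an empty array for
            // null or empty input, String#split yields a one-element array
            // containing the empty string for empty input:
            System.out.println("".split(",").length);        // 1

            // ...and a null receiver would throw a NullPointerException:
            // ((String) null).split(",");
        }
    }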
--- .../action/search/TransportSearchHelper.java | 3 +- .../org/elasticsearch/common/Strings.java | 40 +------------------ .../java/org/elasticsearch/common/Table.java | 2 +- .../elasticsearch/common/path/PathTrie.java | 13 +++--- .../common/settings/Settings.java | 13 ------ .../common/util/BloomFilter.java | 3 +- .../xcontent/support/XContentMapValues.java | 4 +- .../index/mapper/DocumentParser.java | 17 ++++---- .../index/query/GeoShapeQueryBuilder.java | 3 +- .../ingest/core/IngestDocument.java | 4 +- .../admin/cluster/node/tasks/TasksIT.java | 2 +- .../common/settings/SettingsTests.java | 18 --------- .../cloud/gce/network/GceNameResolver.java | 2 +- .../repositories/azure/AzureRepository.java | 2 +- .../repositories/s3/S3Repository.java | 2 +- .../ingest/RandomDocumentPicks.java | 4 +- 16 files changed, 29 insertions(+), 103 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index 49d4c65add1..8da195b739b 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.internal.InternalScrollSearchRequest; @@ -89,7 +88,7 @@ final class TransportSearchHelper { } catch (Exception e) { throw new IllegalArgumentException("Failed to decode scrollId", e); } - String[] elements = Strings.splitStringToArray(spare.get(), ';'); + String[] elements = spare.get().toString().split(";"); if (elements.length < 2) { throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]"); } diff --git a/core/src/main/java/org/elasticsearch/common/Strings.java b/core/src/main/java/org/elasticsearch/common/Strings.java index 151c53e2007..6b6c31c1522 100644 --- a/core/src/main/java/org/elasticsearch/common/Strings.java +++ b/core/src/main/java/org/elasticsearch/common/Strings.java @@ -38,7 +38,6 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Properties; -import java.util.Random; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeSet; @@ -557,7 +556,8 @@ public class Strings { } public static String[] splitStringByCommaToArray(final String s) { - return splitStringToArray(s, ','); + if (s == null || s.isEmpty()) return Strings.EMPTY_ARRAY; + else return s.split(","); } public static Set splitStringToSet(final String s, final char c) { @@ -588,42 +588,6 @@ public class Strings { return result; } - public static String[] splitStringToArray(final CharSequence s, final char c) { - if (s == null || s.length() == 0) { - return Strings.EMPTY_ARRAY; - } - int count = 1; - for (int i = 0; i < s.length(); i++) { - if (s.charAt(i) == c) { - count++; - } - } - final String[] result = new String[count]; - final StringBuilder builder = new StringBuilder(); - int res = 0; - for (int i = 0; i < s.length(); i++) { - if (s.charAt(i) == c) { - if (builder.length() > 0) { - result[res++] = builder.toString(); - builder.setLength(0); - } - - } else { - builder.append(s.charAt(i)); - } - } - if (builder.length() > 0) { - 
result[res++] = builder.toString(); - } - if (res != count) { - // we have empty strings, copy over to a new array - String[] result1 = new String[res]; - System.arraycopy(result, 0, result1, 0, res); - return result1; - } - return result; - } - /** * Split a String at the first occurrence of the delimiter. * Does not include the delimiter in the result. diff --git a/core/src/main/java/org/elasticsearch/common/Table.java b/core/src/main/java/org/elasticsearch/common/Table.java index 0d4a827202d..ab0252b11dc 100644 --- a/core/src/main/java/org/elasticsearch/common/Table.java +++ b/core/src/main/java/org/elasticsearch/common/Table.java @@ -149,7 +149,7 @@ public class Table { // get the attributes of the header cell we are going to add mAttr.putAll(headers.get(currentCells.size()).attr); } - String[] sAttrs = Strings.splitStringToArray(attributes, ';'); + String[] sAttrs = attributes.split(";"); for (String sAttr : sAttrs) { if (sAttr.length() == 0) { continue; diff --git a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java index 704468f7533..3c25a9caa52 100644 --- a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java +++ b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.path; -import org.elasticsearch.common.Strings; - import java.util.HashMap; import java.util.Map; @@ -38,7 +36,7 @@ public class PathTrie { private final Decoder decoder; private final TrieNode root; - private final char separator; + private final String separator; private T rootValue; public PathTrie(Decoder decoder) { @@ -47,8 +45,9 @@ public class PathTrie { public PathTrie(char separator, String wildcard, Decoder decoder) { this.decoder = decoder; - this.separator = separator; - root = new TrieNode(new String(new char[]{separator}), null, wildcard); + final String separatorAsString = new String(new char[]{separator}); + this.separator = separatorAsString; + root = new TrieNode(separatorAsString, null, wildcard); } public class TrieNode { @@ -196,7 +195,7 @@ public class PathTrie { } public void insert(String path, T value) { - String[] strings = Strings.splitStringToArray(path, separator); + String[] strings = path.split(separator); if (strings.length == 0) { rootValue = value; return; @@ -217,7 +216,7 @@ public class PathTrie { if (path.length() == 0) { return rootValue; } - String[] strings = Strings.splitStringToArray(path, separator); + String[] strings = path.split(separator); if (strings.length == 0) { return rootValue; } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 1c243255454..8488ca75c73 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -899,19 +899,6 @@ public final class Settings implements ToXContent { return this; } - public Builder loadFromDelimitedString(String value, char delimiter) { - String[] values = Strings.splitStringToArray(value, delimiter); - for (String s : values) { - int index = s.indexOf('='); - if (index == -1) { - throw new IllegalArgumentException( - "value [" + s + "] for settings loaded with delimiter [" + delimiter + "] is malformed, missing ="); - } - map.put(s.substring(0, index), s.substring(index + 1)); - } - return this; - } - /** * Loads settings from the actual string content that represents them using the * {@link 
SettingsLoaderFactory#loaderFromSource(String)}. diff --git a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java index b9dd6859ce0..6c471cddb55 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java +++ b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java @@ -24,7 +24,6 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -75,7 +74,7 @@ public class BloomFilter { if (config == null) { return buildDefault(); } - String[] sEntries = Strings.splitStringToArray(config, ','); + String[] sEntries = config.split(","); if (sEntries.length == 0) { if (config.length() > 0) { return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))}); diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 4612d3f05d0..a8c120f424b 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -40,7 +40,7 @@ public class XContentMapValues { */ public static List extractRawValues(String path, Map map) { List values = new ArrayList<>(); - String[] pathElements = Strings.splitStringToArray(path, '.'); + String[] pathElements = path.split("\\."); if (pathElements.length == 0) { return values; } @@ -93,7 +93,7 @@ public class XContentMapValues { } public static Object extractValue(String path, Map map) { - String[] pathElements = Strings.splitStringToArray(path, '.'); + String[] pathElements = path.split("\\."); if (pathElements.length == 0) { return null; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 3d191e4d80a..a666ffc0ed5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -19,13 +19,6 @@ package org.elasticsearch.index.mapper; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; - import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; @@ -51,6 +44,13 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import org.elasticsearch.index.mapper.object.ObjectMapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + /** A parser for documents, given mappings from a DocumentMapper */ final class DocumentParser { @@ -829,8 +829,7 @@ final class DocumentParser { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); - // TODO: why Strings.splitStringToArray instead of String.split? 
- final String[] paths = Strings.splitStringToArray(field, '.'); + final String[] paths = field.split("\\."); final String fieldName = paths[paths.length-1]; ObjectMapper mapper = context.root(); ObjectMapper[] mappers = new ObjectMapper[paths.length-1]; diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 8ff2f697b45..d3c47d5bc70 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; @@ -379,7 +378,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder(node.getName(), actionMasks), listener); assertNull(oldListener); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index ced961a70b8..3539e54d943 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -38,24 +38,6 @@ import static org.hamcrest.Matchers.nullValue; public class SettingsTests extends ESTestCase { - public void testLoadFromDelimitedString() { - Settings settings = Settings.builder() - .loadFromDelimitedString("key1=value1;key2=value2", ';') - .build(); - assertThat(settings.get("key1"), equalTo("value1")); - assertThat(settings.get("key2"), equalTo("value2")); - assertThat(settings.getAsMap().size(), equalTo(2)); - assertThat(settings.toDelimitedString(';'), equalTo("key1=value1;key2=value2;")); - - settings = Settings.builder() - .loadFromDelimitedString("key1=value1;key2=value2;", ';') - .build(); - assertThat(settings.get("key1"), equalTo("value1")); - assertThat(settings.get("key2"), equalTo("value2")); - assertThat(settings.getAsMap().size(), equalTo(2)); - assertThat(settings.toDelimitedString(';'), equalTo("key1=value1;key2=value2;")); - } - public void testReplacePropertiesPlaceholderSystemProperty() { String value = System.getProperty("java.home"); assertFalse(value.isEmpty()); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java index 22d79fb1614..0bd5e07da91 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java @@ -92,7 +92,7 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso } else if (value.startsWith(GceAddressResolverType.PRIVATE_IP.configName)) { // We extract the network interface from gce:privateIp:XX String network = "0"; - String[] privateIpConfig = Strings.splitStringToArray(value, ':'); + String[] privateIpConfig = value.split(":"); if (privateIpConfig != null && privateIpConfig.length == 3) { network = privateIpConfig[2]; } diff --git 
a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 66db57fdd92..8af614df605 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -116,7 +116,7 @@ public class AzureRepository extends BlobStoreRepository { // Remove starting / if any basePath = Strings.trimLeadingCharacter(basePath, '/'); BlobPath path = new BlobPath(); - for(String elem : Strings.splitStringToArray(basePath, '/')) { + for(String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index a09d57ebc93..5861893d6b2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -287,7 +287,7 @@ public class S3Repository extends BlobStoreRepository { String basePath = getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); - for(String elem : Strings.splitStringToArray(basePath, '/')) { + for(String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java index c42d51b7abd..6dc920c6ff8 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -22,8 +22,6 @@ package org.elasticsearch.ingest; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.ingest.core.IngestDocument; import java.util.ArrayList; @@ -105,7 +103,7 @@ public final class RandomDocumentPicks { * that each node of the tree either doesn't exist or is a map, otherwise new fields cannot be added. */ public static boolean canAddField(String path, IngestDocument ingestDocument) { - String[] pathElements = Strings.splitStringToArray(path, '.'); + String[] pathElements = path.split("\\."); Map innerMap = ingestDocument.getSourceAndMetadata(); if (pathElements.length > 1) { for (int i = 0; i < pathElements.length - 1; i++) { From 9fe5ce934279a274abdf67b417aae8d1566cde63 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 4 May 2016 09:18:30 -0400 Subject: [PATCH 0063/1311] Remove arbitrary separator/wildcard from PathTrie PathTrie has a constructor that allows for an arbitrary separator and wildcard, but this constructor is unused and internally we always use '/' as the separator and '*' as the wildcard. There are no tests for the case where the separator differs from the default separator and wildcard. This commit removes this constructor and now all instances of PathTrie have the default separator and wildcard. 
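As a brief usage sketch of the class after this change (the identity decoder lambda and the retrieve() call are assumptions based on the surrounding code, not shown verbatim in this patch):

    // The separator is now always '/' and the wildcard is always '*'.
    PathTrie<String> trie = new PathTrie<>(value -> value); // identity Decoder
    trie.insert("/_cluster/health", "cluster health handler");
    trie.insert("/{index}/_search", "search handler"); // {index} binds a path parameter
    String handler = trie.retrieve("/twitter/_search"); // expected: "search handler"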
--- .../elasticsearch/common/path/PathTrie.java | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java index 3c25a9caa52..c711bfa2a61 100644 --- a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java +++ b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java @@ -25,9 +25,6 @@ import java.util.Map; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -/** - * - */ public class PathTrie { public interface Decoder { @@ -36,18 +33,14 @@ public class PathTrie { private final Decoder decoder; private final TrieNode root; - private final String separator; private T rootValue; - public PathTrie(Decoder decoder) { - this('/', "*", decoder); - } + private static final String SEPARATOR = "/"; + private static final String WILDCARD = "*"; - public PathTrie(char separator, String wildcard, Decoder decoder) { + public PathTrie(Decoder decoder) { this.decoder = decoder; - final String separatorAsString = new String(new char[]{separator}); - this.separator = separatorAsString; - root = new TrieNode(separatorAsString, null, wildcard); + root = new TrieNode(SEPARATOR, null, WILDCARD); } public class TrieNode { @@ -195,7 +188,7 @@ public class PathTrie { } public void insert(String path, T value) { - String[] strings = path.split(separator); + String[] strings = path.split(SEPARATOR); if (strings.length == 0) { rootValue = value; return; @@ -216,7 +209,7 @@ public class PathTrie { if (path.length() == 0) { return rootValue; } - String[] strings = path.split(separator); + String[] strings = path.split(SEPARATOR); if (strings.length == 0) { return rootValue; } From 052191f2a2a76cb214c74340eede8d193ae24d24 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 4 May 2016 10:55:24 +0200 Subject: [PATCH 0064/1311] Add the ability to use the breadth_first mode with nested aggregations (such as `top_hits`) which require access to score information. The score is recomputed lazily for each document belonging to a top bucket. Relates to #9825 --- .../elasticsearch/common/lucene/Lucene.java | 25 ------------- .../search/aggregations/AggregatorBase.java | 2 +- .../bucket/BestBucketsDeferringCollector.java | 33 +++++++++++++---- .../bucket/terms/TermsAggregator.java | 1 - .../aggregations/metrics/TopHitsIT.java | 35 +++++++++++++++++-- .../search/profile/ProfileTests.java | 2 +- .../bucket/terms-aggregation.asciidoc | 5 ++- 7 files changed, 64 insertions(+), 39 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 8508a8a2e40..fcc443c57b6 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -638,31 +638,6 @@ public class Lucene { } } - /** - * Return a Scorer that throws an ElasticsearchIllegalStateException - * on all operations with the given message. 
- */ - public static Scorer illegalScorer(final String message) { - return new Scorer(null) { - @Override - public float score() throws IOException { - throw new IllegalStateException(message); - } - @Override - public int freq() throws IOException { - throw new IllegalStateException(message); - } - @Override - public int docID() { - throw new IllegalStateException(message); - } - @Override - public DocIdSetIterator iterator() { - throw new IllegalStateException(message); - } - }; - } - private static final class CommitPoint extends IndexCommit { private String segmentsFileName; private final Collection files; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index e70780ec48c..04023b04977 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -165,7 +165,7 @@ public abstract class AggregatorBase extends Aggregator { public DeferringBucketCollector getDeferringCollector() { // Default impl is a collector that selects the best buckets // but an alternative defer policy may be based on best docs. - return new BestBucketsDeferringCollector(); + return new BestBucketsDeferringCollector(context()); } /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 8a379d1ad82..43cefdca290 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -20,6 +20,9 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.lucene.Lucene; @@ -30,6 +33,7 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.ArrayList; @@ -56,6 +60,7 @@ public class BestBucketsDeferringCollector extends DeferringBucketCollector { final List entries = new ArrayList<>(); BucketCollector collector; + final AggregationContext aggContext; LeafReaderContext context; PackedLongValues.Builder docDeltas; PackedLongValues.Builder buckets; @@ -64,7 +69,8 @@ public class BestBucketsDeferringCollector extends DeferringBucketCollector { LongHash selectedBuckets; /** Sole constructor. 
*/ - public BestBucketsDeferringCollector() { + public BestBucketsDeferringCollector(AggregationContext context) { + this.aggContext = context; } @Override @@ -139,19 +145,34 @@ public class BestBucketsDeferringCollector extends DeferringBucketCollector { this.selectedBuckets = hash; collector.preCollection(); - if (collector.needsScores()) { - throw new IllegalStateException("Cannot defer if scores are needed"); + boolean needsScores = collector.needsScores(); + Weight weight = null; + if (needsScores) { + weight = aggContext.searchContext().searcher() + .createNormalizedWeight(aggContext.searchContext().query(), true); } - for (Entry entry : entries) { final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context); - leafCollector.setScorer(Lucene.illegalScorer("A limitation of the " + SubAggCollectionMode.BREADTH_FIRST - + " collection mode is that scores cannot be buffered along with document IDs")); + DocIdSetIterator docIt = null; + if (needsScores && entry.docDeltas.size() > 0) { + Scorer scorer = weight.scorer(entry.context); + // We don't need to check if the scorer is null + // since we are sure that there are documents to replay (entry.docDeltas is not empty). + docIt = scorer.iterator(); + leafCollector.setScorer(scorer); + } final PackedLongValues.Iterator docDeltaIterator = entry.docDeltas.iterator(); final PackedLongValues.Iterator buckets = entry.buckets.iterator(); int doc = 0; for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) { doc += docDeltaIterator.next(); + if (needsScores) { + if (docIt.docID() < doc) { + docIt.advance(doc); + } + // aggregations should only be replayed on matching documents + assert docIt.docID() == doc; + } final long bucket = buckets.next(); final long rebasedBucket = hash.find(bucket); if (rebasedBucket != -1) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java index 1c59711b646..eefaf63c62b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java @@ -199,7 +199,6 @@ public abstract class TermsAggregator extends BucketsAggregator { @Override protected boolean shouldDefer(Aggregator aggregator) { return collectMode == SubAggCollectionMode.BREADTH_FIRST - && aggregator.needsScores() == false && !aggsUsedForSorting.contains(aggregator); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 36d021a5f3f..2cd1fed7fbe 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -351,8 +351,7 @@ public class TopHitsIT extends ESIntegTestCase { } - public void testBreadthFirst() throws Exception { - // breadth_first will be ignored since we need scores + public void testBreadthFirstWithScoreNeeded() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .executionHint(randomExecutionHint()) @@ -382,6 +381,38 @@ public class TopHitsIT extends ESIntegTestCase { } } + public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { + SearchResponse response = client().prepareSearch("idx").setTypes("type") +
.addAggregation(terms("terms") + .executionHint(randomExecutionHint()) + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TERMS_AGGS_FIELD) + .order(Terms.Order.aggregation("max", false)) + .subAggregation(max("max").field(SORT_FIELD)) + .subAggregation(topHits("hits").size(3)) + ).get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + int id = 4; + for (Terms.Bucket bucket : terms.getBuckets()) { + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo("val" + id)); + assertThat(bucket.getDocCount(), equalTo(10L)); + TopHits topHits = bucket.getAggregations().get("hits"); + SearchHits hits = topHits.getHits(); + assertThat(hits.totalHits(), equalTo(10L)); + assertThat(hits.getHits().length, equalTo(3)); + + assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4)); + id--; + } + } + public void testBasicsGetProperty() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(topHits("hits"))).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java index 83f6efaa150..afc1c7b3cbb 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/ProfileTests.java @@ -164,7 +164,7 @@ public class ProfileTests extends ESTestCase { final LeafCollector leafCollector = profileCollector.getLeafCollector(reader.leaves().get(0)); assertThat(profileCollector.getTime(), greaterThan(0L)); long time = profileCollector.getTime(); - leafCollector.setScorer(Lucene.illegalScorer("dummy scorer")); + leafCollector.setScorer(null); assertThat(profileCollector.getTime(), greaterThan(time)); time = profileCollector.getTime(); leafCollector.collect(0); diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 5d79c5580cb..14dec039435 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -635,9 +635,8 @@ elasticsearch will always use the `depth_first` collect_mode unless explicitly i Note that the `order` parameter can still be used to refer to data from a child aggregation when using the `breadth_first` setting - the parent aggregation understands that this child aggregation will need to be called first before any of the other child aggregations. -WARNING: It is not possible to nest aggregations such as `top_hits` which require access to match score information under an aggregation that uses -the `breadth_first` collection mode. This is because this would require a RAM buffer to hold the float score value for every document and -this would typically be too costly in terms of RAM. +WARNING: Nested aggregations such as `top_hits` which require access to score information under an aggregation that uses the `breadth_first` +collection mode need to replay the query on the second pass, but only for the documents belonging to the top buckets.
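To make the new behaviour concrete, here is a hedged sketch of such a request through the same Java builders exercised by the `TopHitsIT` test above. The index name `"idx"` and field name `"field"` are placeholders, not part of the commit:

[source,java]
----
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;

import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits;

// Sketch only: same builders and collect mode as the TopHitsIT test above.
public class BreadthFirstTopHitsExample {
    public static SearchResponse search(Client client) {
        return client.prepareSearch("idx")
                .addAggregation(terms("terms")
                        .field("field")
                        .collectMode(SubAggCollectionMode.BREADTH_FIRST)
                        // top_hits needs scores; with this change they are
                        // recomputed lazily for documents in the top buckets
                        .subAggregation(topHits("hits").size(3)))
                .get();
    }
}
----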
[[search-aggregations-bucket-terms-aggregation-execution-hint]] ==== Execution hint From c4554fedad100cb82a5cab9c7efcd127d4c4d205 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 4 May 2016 16:46:44 +0200 Subject: [PATCH 0065/1311] Fix build: restore illegalScorer still in use in ExpressionSearchScript --- .../elasticsearch/common/lucene/Lucene.java | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index fcc443c57b6..3540b3712ae 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -638,6 +638,56 @@ public class Lucene { } } + /** + * Return a Scorer that throws an ElasticsearchIllegalStateException + * on all operations with the given message. + */ + public static Scorer illegalScorer(final String message) { + return new Scorer(null) { + @Override + public float score() throws IOException { + throw new IllegalStateException(message); + } + @Override + public int freq() throws IOException { + throw new IllegalStateException(message); + } + @Override + public int docID() { + throw new IllegalStateException(message); + } + @Override + public DocIdSetIterator iterator() { + throw new IllegalStateException(message); + } + }; + } + + /** + * Return a Scorer that throws an ElasticsearchIllegalStateException + * on all operations with the given message. + */ + public static Scorer illegalScorer(final String message) { + return new Scorer(null) { + @Override + public float score() throws IOException { + throw new IllegalStateException(message); + } + @Override + public int freq() throws IOException { + throw new IllegalStateException(message); + } + @Override + public int docID() { + throw new IllegalStateException(message); + } + @Override + public DocIdSetIterator iterator() { + throw new IllegalStateException(message); + } + }; + } + private static final class CommitPoint extends IndexCommit { private String segmentsFileName; private final Collection files; From 8d3427b44dc67625f2e8d0e38bbe41e3fe6d3f05 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Wed, 4 May 2016 16:48:08 +0200 Subject: [PATCH 0066/1311] Fix build: restore illegalScorer still in use in ExpressionSearchScript (2) --- .../elasticsearch/common/lucene/Lucene.java | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 3540b3712ae..8508a8a2e40 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -638,31 +638,6 @@ public class Lucene { } } - /** - * Return a Scorer that throws an ElasticsearchIllegalStateException - * on all operations with the given message. 
- */ - public static Scorer illegalScorer(final String message) { - return new Scorer(null) { - @Override - public float score() throws IOException { - throw new IllegalStateException(message); - } - @Override - public int freq() throws IOException { - throw new IllegalStateException(message); - } - @Override - public int docID() { - throw new IllegalStateException(message); - } - @Override - public DocIdSetIterator iterator() { - throw new IllegalStateException(message); - } - }; - } - /** * Return a Scorer that throws an ElasticsearchIllegalStateException * on all operations with the given message. From 223d67df4aa49f8f7b2c62b4ded2a0824713bc38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 3 May 2016 16:16:29 +0200 Subject: [PATCH 0067/1311] Consolidate query generation in QueryShardContext Currently we have a lot of methods left in QueryShardContext that take parsers or BytesReference arguments to do some xContent parsing on the shard. While this still seems necessary in some cases (e.g. percolation, phrase suggester), the shard context should only be concerned with generating Lucene queries from QueryBuilders. This change removes all of the parseX() methods in favour of two public methods toQuery(QueryBuilder) and toFilter(QueryBuilder) that either call the query builder's toFilter() or toQuery() method, and moves all code required for parsing out to the respective callers. --- .../org/elasticsearch/index/IndexService.java | 10 ++- .../percolator/PercolatorFieldMapper.java | 2 +- .../index/query/AbstractQueryBuilder.java | 4 +- .../index/query/QueryShardContext.java | 90 +++++-------------- .../search/query/PostFilterParseElement.java | 4 +- .../search/query/QueryParseElement.java | 4 +- .../search/rescore/QueryRescorer.java | 14 --- .../search/rescore/Rescorer.java | 12 --- .../suggest/phrase/PhraseSuggester.java | 14 ++- .../index/query/AbstractQueryTestCase.java | 6 ++ 10 files changed, 55 insertions(+), 105 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index bd96727d28f..ea2cfe9f106 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -431,6 +431,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return nodeServicesProvider; } + @Override public IndexSettings getIndexSettings() { return indexSettings; } @@ -598,18 +599,18 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } } - private Query parse(AliasMetaData alias, QueryShardContext parseContext) { + private Query parse(AliasMetaData alias, QueryShardContext shardContext) { if (alias.filter() == null) { return null; } try { byte[] filterSource = alias.filter().uncompressed(); try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) { - ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser); + ParsedQuery parsedFilter = shardContext.toFilter(shardContext.newParseContext(parser).parseInnerQueryBuilder()); return parsedFilter == null ?
null : parsedFilter.query(); } } catch (IOException ex) { - throw new AliasFilterParsingException(parseContext.index(), alias.getAlias(), "Invalid alias filter", ex); + throw new AliasFilterParsingException(shardContext.index(), alias.getAlias(), "Invalid alias filter", ex); } } @@ -759,6 +760,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return scheduledFuture != null; } + @Override public final void run() { try { runInternal(); @@ -824,6 +826,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC super(indexService, indexService.getIndexSettings().getTranslogSyncInterval()); } + @Override protected String getThreadPool() { return ThreadPool.Names.FLUSH; } @@ -849,6 +852,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC indexService.maybeRefreshEngine(); } + @Override protected String getThreadPool() { return ThreadPool.Names.REFRESH; } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 6637dd8b762..10c7e46e353 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -224,7 +224,7 @@ public class PercolatorFieldMapper extends FieldMapper { return queryBuilder.toQuery(context); } - static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentLocation location) { + private static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentLocation location) { try { return context.parseInnerQueryBuilder(); } catch (IOException e) { diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index ab04c1aff44..6e82e7059d8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -239,14 +239,14 @@ public abstract class AbstractQueryBuilder> return getWriteableName(); } - protected final void writeQueries(StreamOutput out, List> queries) throws IOException { + protected final static void writeQueries(StreamOutput out, List queries) throws IOException { out.writeVInt(queries.size()); for (QueryBuilder query : queries) { out.writeNamedWriteable(query); } } - protected final List> readQueries(StreamInput in) throws IOException { + protected final static List> readQueries(StreamInput in) throws IOException { List> queries = new ArrayList<>(); int size = in.readVInt(); for (int i = 0; i < size; i++) { diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index af87a986243..4aa72728bc8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -26,7 +26,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.IndexReader; import org.apache.lucene.queryparser.classic.MapperQueryParser; @@ -37,13 +36,9 @@ import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.Version; import org.elasticsearch.client.Client; import 
org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -292,84 +287,45 @@ public class QueryShardContext extends QueryRewriteContext { return false; } - public ParsedQuery parse(BytesReference source) { - XContentParser parser = null; - try { - parser = XContentFactory.xContent(source).createParser(source); - return innerParse(parser); - } catch (ParsingException e) { - throw e; - } catch (Exception e) { - throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e); - } finally { - if (parser != null) { - parser.close(); - } - } - } - - public ParsedQuery parse(XContentParser parser) { - try { - return innerParse(parser); - } catch(IOException e) { - throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e); - } - } - - /** - * Parses an inner filter, returning null if the filter should be ignored. - */ - @Nullable - public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException { - reset(); - try { - Query filter = QueryBuilder.rewriteQuery(newParseContext(parser).parseInnerQueryBuilder(), this).toFilter(this); + public ParsedQuery toFilter(QueryBuilder queryBuilder) { + return toQuery(queryBuilder, q -> { + Query filter = q.toFilter(this); if (filter == null) { return null; } - return new ParsedQuery(filter, copyNamedQueries()); - } finally { - reset(); - } + return filter; + }); } + public ParsedQuery toQuery(QueryBuilder queryBuilder) { + return toQuery(queryBuilder, q -> { + Query query = q.toQuery(this); + if (query == null) { + query = Queries.newMatchNoDocsQuery("No query left after rewrite."); + } + return query; + }); + } - private ParsedQuery innerParse(XContentParser parser) throws IOException, QueryShardException { + @FunctionalInterface + private interface CheckedFunction { + R apply(T t) throws IOException; + } + + private ParsedQuery toQuery(QueryBuilder queryBuilder, CheckedFunction filterOrQuery) { reset(); try { - Query query = parseInnerQuery(parser); - return new ParsedQuery(query, copyNamedQueries()); - } finally { - reset(); - } - } - - public Query parseInnerQuery(XContentParser parser) throws IOException { - return toQuery(this.newParseContext(parser).parseInnerQueryBuilder(), this); - } - - public ParsedQuery toQuery(QueryBuilder queryBuilder) { - reset(); - try { - Query query = toQuery(queryBuilder, this); - return new ParsedQuery(query, copyNamedQueries()); + QueryBuilder rewriteQuery = QueryBuilder.rewriteQuery(queryBuilder, this); + return new ParsedQuery(filterOrQuery.apply(rewriteQuery), copyNamedQueries()); } catch(QueryShardException | ParsingException e ) { throw e; } catch(Exception e) { throw new QueryShardException(this, "failed to create query: {}", e, queryBuilder); } finally { - this.reset(); + reset(); } } - private static Query toQuery(final QueryBuilder queryBuilder, final QueryShardContext context) throws IOException { - final Query query = QueryBuilder.rewriteQuery(queryBuilder, context).toQuery(context); - if (query == null) { - return Queries.newMatchNoDocsQuery("No query left after 
rewrite."); - } - return query; - } - public final Index index() { return indexSettings.getIndex(); } diff --git a/core/src/main/java/org/elasticsearch/search/query/PostFilterParseElement.java b/core/src/main/java/org/elasticsearch/search/query/PostFilterParseElement.java index 6995b6ff8a7..1b9fee22d76 100644 --- a/core/src/main/java/org/elasticsearch/search/query/PostFilterParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/query/PostFilterParseElement.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.query; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; @@ -30,7 +31,8 @@ public class PostFilterParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { - ParsedQuery postFilter = context.getQueryShardContext().parseInnerFilter(parser); + QueryShardContext shardContext = context.getQueryShardContext(); + ParsedQuery postFilter = shardContext.toFilter(shardContext.newParseContext(parser).parseInnerQueryBuilder()); if (postFilter != null) { context.parsedPostFilter(postFilter); } diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryParseElement.java b/core/src/main/java/org/elasticsearch/search/query/QueryParseElement.java index 094a29cd6b1..cfa4ea21747 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryParseElement.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.query; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; @@ -30,6 +31,7 @@ public class QueryParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { - context.parsedQuery(context.getQueryShardContext().parse(parser)); + QueryShardContext queryShardContext = context.getQueryShardContext(); + context.parsedQuery(queryShardContext.toQuery(queryShardContext.newParseContext(parser).parseInnerQueryBuilder())); } } diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index 319055639ac..29d62f3c7d3 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -120,20 +120,6 @@ public final class QueryRescorer implements Rescorer { } } - private static final ObjectParser RESCORE_PARSER = new ObjectParser<>("query", null); - - static { - RESCORE_PARSER.declareObject(QueryRescoreContext::setQuery, (p, c) -> c.parse(p).query(), new ParseField("rescore_query")); - RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight")); - RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight")); - RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode")); - } - - @Override - public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException { - return RESCORE_PARSER.parse(parser, new QueryRescoreContext(this), 
context); - } - private final static Comparator SCORE_DOC_COMPARATOR = new Comparator() { @Override public int compare(ScoreDoc o1, ScoreDoc o2) { diff --git a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java index b475ca90db1..5e824aadc6c 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java @@ -23,8 +23,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TopDocs; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -65,16 +63,6 @@ public interface Rescorer { public Explanation explain(int topLevelDocId, SearchContext context, RescoreSearchContext rescoreContext, Explanation sourceExplanation) throws IOException; - /** - * Parses the {@link RescoreSearchContext} for this implementation - * - * @param parser the parser to read the context from - * @param context the current shard context - * @return the parsed {@link RescoreSearchContext} - * @throws IOException if an {@link IOException} occurs while parsing the context - */ - public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException; - /** * Extracts all terms needed to execute this {@link Rescorer}. This method * is executed in a distributed frequency collection roundtrip for diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 11a0c10ee0e..9aed90f55e5 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -32,8 +32,11 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; @@ -115,11 +118,14 @@ public final class PhraseSuggester extends Suggester { // from the index for a correction, collateMatch is updated final Map vars = suggestion.getCollateScriptParams(); vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); - ScriptService scriptService = suggestion.getShardContext().getScriptService(); + QueryShardContext shardContext = suggestion.getShardContext(); + ScriptService scriptService = shardContext.getScriptService(); final ExecutableScript executable = scriptService.executable(collateScript, vars); final BytesReference querySource = (BytesReference) executable.run(); - final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource); - collateMatch = Lucene.exists(searcher, parsedQuery.query()); + try (XContentParser parser = XContentFactory.xContent(querySource).createParser(querySource)) { + final ParsedQuery 
parsedQuery = shardContext.toQuery(shardContext.newParseContext(parser).parseInnerQueryBuilder()); + collateMatch = Lucene.exists(searcher, parsedQuery.query()); + } } if (!collateMatch && !collatePrune) { continue; @@ -142,7 +148,7 @@ public final class PhraseSuggester extends Suggester { return response; } - private PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { + private static PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) { spare.copyUTF8Bytes(suggestion.getText()); return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore); } diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index ba55413a27b..fac11ab1f78 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -593,6 +593,12 @@ public abstract class AbstractQueryTestCase> assertNotEquals("modifying the boost doesn't affect the corresponding lucene query", rewrite(firstLuceneQuery), rewrite(thirdLuceneQuery)); } + + // check that context#isFilter is not changed by invoking toQuery/rewrite + boolean filterFlag = randomBoolean(); + context.setIsFilter(filterFlag); + rewriteQuery(firstQuery, context).toQuery(context); + assertEquals("isFilter should be unchanged", filterFlag, context.isFilter()); } } From fad0705e3049bd1c3a74593e9adbfbfadbd82944 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 4 May 2016 10:23:49 -0400 Subject: [PATCH 0068/1311] [docs] Modernize README.textile * camel_case all the things * `?pretty` all the things (we tell people to do this in bug reports) Closes #18136 --- README.textile | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/README.textile b/README.textile index 5c75844b108..e48495a5524 100644 --- a/README.textile +++ b/README.textile @@ -50,19 +50,19 @@ h3. Indexing Let's try and index some twitter like information. First, let's create a twitter user, and add some tweets (the @twitter@ index will be created automatically):
-curl -XPUT 'http://localhost:9200/twitter/user/kimchy' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/twitter/user/kimchy?pretty' -d '{ "name" : "Shay Banon" }'
 
-curl -XPUT 'http://localhost:9200/twitter/tweet/1' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/1?pretty' -d '
 {
     "user": "kimchy",
-    "postDate": "2009-11-15T13:12:00",
+    "post_date": "2009-11-15T13:12:00",
     "message": "Trying out Elasticsearch, so far so good?"
 }'
 
-curl -XPUT 'http://localhost:9200/twitter/tweet/2' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/2?pretty' -d '
 {
     "user": "kimchy",
-    "postDate": "2009-11-15T14:12:12",
+    "post_date": "2009-11-15T14:12:12",
     "message": "Another tweet, will it be indexed?"
 }'
 
@@ -101,7 +101,7 @@ Just for kicks, let's get all the documents stored (we should see the user as we curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d ' { "query" : { - "matchAll" : {} + "match_all" : {} } }' @@ -113,7 +113,7 @@ curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d ' { "query" : { "range" : { - "postDate" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" } + "post_date" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" } } } }' @@ -130,19 +130,19 @@ Elasticsearch supports multiple indices, as well as multiple types per index. In Another way to define our simple twitter system is to have a different index per user (note, though that each index has an overhead). Here is the indexing curl's in this case:
-curl -XPUT 'http://localhost:9200/kimchy/info/1' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/kimchy/info/1?pretty' -d '{ "name" : "Shay Banon" }'
 
-curl -XPUT 'http://localhost:9200/kimchy/tweet/1' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/1?pretty' -d '
 {
     "user": "kimchy",
-    "postDate": "2009-11-15T13:12:00",
+    "post_date": "2009-11-15T13:12:00",
     "message": "Trying out Elasticsearch, so far so good?"
 }'
 
-curl -XPUT 'http://localhost:9200/kimchy/tweet/2' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/2?pretty' -d '
 {
     "user": "kimchy",
-    "postDate": "2009-11-15T14:12:12",
+    "post_date": "2009-11-15T14:12:12",
     "message": "Another tweet, will it be indexed?"
 }'
 
@@ -152,11 +152,11 @@ The above will index information into the @kimchy@ index, with two types, @info@ Complete control on the index level is allowed. As an example, in the above case, we would want to change from the default 5 shards with 1 replica per index, to only 1 shard with 1 replica per index (== per twitter user). Here is how this can be done (the configuration can be in yaml as well):
-curl -XPUT http://localhost:9200/another_user/ -d '
+curl -XPUT http://localhost:9200/another_user?pretty -d '
 {
     "index" : {
-        "numberOfShards" : 1,
-        "numberOfReplicas" : 1
+        "number_of_shards" : 1,
+        "number_of_replicas" : 1
     }
 }'
 
@@ -168,7 +168,7 @@ index (twitter user), for example: curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -d ' { "query" : { - "matchAll" : {} + "match_all" : {} } }' @@ -179,7 +179,7 @@ Or on all the indices: curl -XGET 'http://localhost:9200/_search?pretty=true' -d ' { "query" : { - "matchAll" : {} + "match_all" : {} } }' From 5a0cfdd6af7365f5af80d9e2cb85a1d4421cf5c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 May 2016 18:40:52 +0200 Subject: [PATCH 0069/1311] Change scriptFields member in InnerHitBuilder to set Adding random shuffling of xContent to InnterHitBuilderTests shows that the scriptFields are stored in order as a list internally although they are an unordered json objects in the query dsl. This changes the internal representation to a set and updates serialization accordingly. --- .../index/query/InnerHitBuilder.java | 25 +++++++++++++------ .../index/query/InnerHitBuilderTests.java | 21 ++++++++-------- 2 files changed, 28 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 2734dbc0e6b..61663713c8e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -47,10 +47,12 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT; @@ -72,7 +74,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl PARSER.declareStringArray(InnerHitBuilder::setFieldDataFields, SearchSourceBuilder.FIELDDATA_FIELDS_FIELD); PARSER.declareField((p, i, c) -> { try { - List scriptFields = new ArrayList<>(); + Set scriptFields = new HashSet<>(); for (XContentParser.Token token = p.nextToken(); token != END_OBJECT; token = p.nextToken()) { scriptFields.add(new ScriptField(c)); } @@ -132,7 +134,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl private QueryBuilder query = new MatchAllQueryBuilder(); private List> sorts; private List fieldDataFields; - private List scriptFields; + private Set scriptFields; private HighlightBuilder highlightBuilder; private FetchSourceContext fetchSourceContext; private Map childInnerHits; @@ -155,7 +157,11 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl fieldNames = (List) in.readGenericValue(); fieldDataFields = (List) in.readGenericValue(); if (in.readBoolean()) { - scriptFields = in.readList(ScriptField::new); + int size = in.readVInt(); + scriptFields = new HashSet<>(size); + for (int i = 0; i < size; i++) { + scriptFields.add(new ScriptField(in)); + } } fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); if (in.readBoolean()) { @@ -190,7 +196,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl fieldDataFields = new ArrayList<>(other.fieldDataFields); } if (other.scriptFields != null) { - scriptFields = new ArrayList<>(other.scriptFields); + scriptFields = new HashSet<>(other.scriptFields); } if (other.fetchSourceContext != null) { fetchSourceContext = new FetchSourceContext( @@ -240,7 +246,10 @@ public final class InnerHitBuilder 
extends ToXContentToBytes implements Writeabl boolean hasScriptFields = scriptFields != null; out.writeBoolean(hasScriptFields); if (hasScriptFields) { - out.writeList(scriptFields); + out.writeVInt(scriptFields.size()); + for (ScriptField scriptField : scriptFields) { + scriptField.writeTo(out); + } } out.writeOptionalStreamable(fetchSourceContext); boolean hasSorts = sorts != null; @@ -350,18 +359,18 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl return this; } - public List<ScriptField> getScriptFields() { + public Set<ScriptField> getScriptFields() { return scriptFields; } - public InnerHitBuilder setScriptFields(List<ScriptField> scriptFields) { + public InnerHitBuilder setScriptFields(Set<ScriptField> scriptFields) { this.scriptFields = scriptFields; return this; } public InnerHitBuilder addScriptField(String name, Script script) { if (scriptFields == null) { - scriptFields = new ArrayList<>(); + scriptFields = new HashSet<>(); } scriptFields.add(new ScriptField(name, script, false)); return this; diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index a05093761d2..d2cbec890e8 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -22,11 +22,11 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.nullValue; - import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -91,12 +91,13 @@ public class InnerHitBuilderTests extends ESTestCase { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { InnerHitBuilder innerHit = randomInnerHits(true, false); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - if (randomBoolean()) { - builder.prettyPrint(); - } innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + if (randomBoolean()) { + shuffled.prettyPrint(); + } - XContentParser parser = XContentHelper.createParser(builder.bytes()); + XContentParser parser = XContentHelper.createParser(shuffled.bytes()); QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(context); assertThat(innerHit, not(sameInstance(secondInnerHits))); @@ -202,7 +203,7 @@ public class InnerHitBuilderTests extends ESTestCase { .innerHit(leafInnerHits); FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(nestedQueryBuilder); Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>(); - ((AbstractQueryBuilder) functionScoreQueryBuilder).extractInnerHitBuilders(innerHitBuilders); + ((AbstractQueryBuilder<?>) functionScoreQueryBuilder).extractInnerHitBuilders(innerHitBuilders); assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue()); } @@ -220,7 +221,7 @@ public class InnerHitBuilderTests extends ESTestCase { innerHits.setTrackScores(randomBoolean()); innerHits.setFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); innerHits.setFieldDataFields(randomListStuff(16, () ->
randomAsciiOfLengthBetween(1, 16))); - innerHits.setScriptFields(randomListStuff(16, InnerHitBuilderTests::randomScript)); + innerHits.setScriptFields(new HashSet<>(randomListStuff(16, InnerHitBuilderTests::randomScript))); FetchSourceContext randomFetchSourceContext; if (randomBoolean()) { randomFetchSourceContext = new FetchSourceContext(randomBoolean()); @@ -245,7 +246,7 @@ public class InnerHitBuilderTests extends ESTestCase { } if (includeQueryTypeOrPath) { - QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)); + QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)); if (randomBoolean()) { return new InnerHitBuilder(innerHits, randomAsciiOfLength(8), query); } else { @@ -299,7 +300,7 @@ public class InnerHitBuilderTests extends ESTestCase { case 7: if (randomBoolean()) { instance.setScriptFields(randomValueOtherThan(instance.getScriptFields(), () -> { - return randomListStuff(16, InnerHitBuilderTests::randomScript);})); + return new HashSet<>(randomListStuff(16, InnerHitBuilderTests::randomScript));})); } else { SearchSourceBuilder.ScriptField script = randomScript(); instance.addScriptField(script.fieldName(), script.script()); From 34d90b041f4cd6232c83c1732c53a93f46d57d66 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 4 May 2016 18:17:10 +0200 Subject: [PATCH 0070/1311] Reorganise scripting docs (#18132) * Reorganize scripting documentation * Further changes to tidy up scripting docs Closes #18116 * Add note about .lat/lon potentially returning null * Added .value to expressions example * Fixed two bad ASCIIDOC links --- docs/reference/modules.asciidoc | 2 - docs/reference/modules/scripting.asciidoc | 103 ++- .../scripting/advanced-scripting.asciidoc | 20 +- .../modules/scripting/expression.asciidoc | 120 +++ .../modules/scripting/fields.asciidoc | 232 ++++++ .../modules/scripting/groovy.asciidoc | 181 +++++ .../modules/scripting/native.asciidoc | 84 ++ .../modules/{ => scripting}/painless.asciidoc | 8 +- .../modules/scripting/scripting.asciidoc | 761 ------------------ .../modules/scripting/security.asciidoc | 136 +++- .../modules/scripting/using.asciidoc | 238 ++++++ 11 files changed, 1108 insertions(+), 777 deletions(-) create mode 100644 docs/reference/modules/scripting/expression.asciidoc create mode 100644 docs/reference/modules/scripting/fields.asciidoc create mode 100644 docs/reference/modules/scripting/groovy.asciidoc create mode 100644 docs/reference/modules/scripting/native.asciidoc rename docs/reference/modules/{ => scripting}/painless.asciidoc (98%) delete mode 100644 docs/reference/modules/scripting/scripting.asciidoc create mode 100644 docs/reference/modules/scripting/using.asciidoc diff --git a/docs/reference/modules.asciidoc b/docs/reference/modules.asciidoc index b71d1224e7e..5a39cdfd790 100644 --- a/docs/reference/modules.asciidoc +++ b/docs/reference/modules.asciidoc @@ -94,8 +94,6 @@ include::modules/network.asciidoc[] include::modules/node.asciidoc[] -include::modules/painless.asciidoc[] - include::modules/plugins.asciidoc[] include::modules/scripting.asciidoc[] diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc index 114740306ee..feb8113a6da 100644 --- a/docs/reference/modules/scripting.asciidoc +++ b/docs/reference/modules/scripting.asciidoc @@ -1,5 +1,104 @@ -include::scripting/scripting.asciidoc[] +[[modules-scripting]] +== Scripting + +The scripting module enables you to use 
scripts to evaluate custom +expressions. For example, you could use a script to return "script fields" +as part of a search request or evaluate a custom score for a query. + +TIP: Elasticsearch now has a built-in scripting language called _Painless_ +that provides a more secure alternative for implementing +scripts for Elasticsearch. We encourage you to try it out -- +for more information, see <<modules-scripting-painless, Painless>>. + +The default scripting language is http://groovy-lang.org/[groovy]. +Additional `lang` plugins enable you to run scripts written in other languages. +Everywhere a script can be used, you can include a `lang` parameter +to specify the language of the script. + +[float] +=== General-purpose languages: + +These languages can be used for any purpose in the scripting APIs, +and give the most flexibility. + +[cols="<,<,<",options="header",] +|======================================================================= +|Language + |Sandboxed + |Required plugin + +|<<modules-scripting-painless, `painless`>> + |yes + |built-in + +|<<modules-scripting-groovy, `groovy`>> + |<<modules-scripting-security, no>> + |built-in + +|{plugins}/lang-javascript.html[`javascript`] + |<<modules-scripting-security, no>> + |{plugins}/lang-javascript.html[`lang-javascript`] + +|{plugins}/lang-python.html[`python`] + |<<modules-scripting-security, no>> + |{plugins}/lang-python.html[`lang-python`] + +|======================================================================= + +[float] +=== Special-purpose languages: + +These languages are less flexible, but typically have higher performance for +certain tasks. + +[cols="<,<,<,<",options="header",] +|======================================================================= +|Language + |Sandboxed + |Required plugin + |Purpose + +|<<modules-scripting-expression, `expression`>> + |yes + |built-in + |fast custom ranking and sorting + +|<<search-template, `mustache`>> + |yes + |built-in + |templates + +|<<modules-scripting-native, `java`>> + |n/a + |you write it! + |expert API + +|======================================================================= + +[WARNING] +.Scripts and security +================================================= + +Languages that are sandboxed are designed with security in mind. However, non- +sandboxed languages can be a security issue; please read +<<modules-scripting-security>> for more details. + +================================================= + + +include::scripting/using.asciidoc[] + +include::scripting/fields.asciidoc[] + +include::scripting/security.asciidoc[] + +include::scripting/groovy.asciidoc[] + +include::scripting/painless.asciidoc[] + +include::scripting/expression.asciidoc[] + +include::scripting/native.asciidoc[] include::scripting/advanced-scripting.asciidoc[] -include::scripting/security.asciidoc[] diff --git a/docs/reference/modules/scripting/advanced-scripting.asciidoc b/docs/reference/modules/scripting/advanced-scripting.asciidoc index 206a2fec50d..b4053d76598 100644 --- a/docs/reference/modules/scripting/advanced-scripting.asciidoc +++ b/docs/reference/modules/scripting/advanced-scripting.asciidoc @@ -1,13 +1,17 @@ [[modules-advanced-scripting]] -=== Text scoring in scripts +=== Advanced text scoring in scripts +experimental[The functionality described on this page is considered experimental and may be changed or removed in a future release] -Text features, such as term or document frequency for a specific term can be accessed in scripts (see <> ) with the `_index` variable. This can be useful if, for example, you want to implement your own scoring model using for example a script inside a <>. +Text features, such as term or document frequency for a specific term can be +accessed in scripts with the `_index` variable.
This can be useful if, for +example, you want to implement your own scoring model using, for example, a +script inside a <<query-dsl-function-score-query,function score query>>. Statistics over the document collection are computed *per shard*, not per index. [float] -==== Nomenclature: +=== Nomenclature: [horizontal] @@ -33,7 +37,7 @@ depending on the shard the current document resides in. [float] -==== Shard statistics: +=== Shard statistics: `_index.numDocs()`:: @@ -49,7 +53,7 @@ depending on the shard the current document resides in. [float] -==== Field statistics: +=== Field statistics: Field statistics can be accessed with a subscript operator like this: `_index['FIELD']`. @@ -74,7 +78,7 @@ depending on the shard the current document resides in. The number of terms in a field cannot be accessed using the `_index` variable. See <> for how to do that. [float] -==== Term statistics: +=== Term statistics: Term statistics for a field can be accessed with a subscript operator like this: `_index['FIELD']['TERM']`. This will never return null, even if term or field does not exist. @@ -101,7 +105,7 @@ affect is your set the <> to `docs`. [float] -==== Term positions, offsets and payloads: +=== Term positions, offsets and payloads: If you need information on the positions of terms in a field, call `_index['FIELD'].get('TERM', flag)` where flag can be @@ -174,7 +178,7 @@ return score; [float] -==== Term vectors: +=== Term vectors: The `_index` variable can only be used to gather statistics for single terms. If you want to use information on all terms in a field, you must store the term vectors (see <>). To access them, call `_index.termVectors()` to get a diff --git a/docs/reference/modules/scripting/expression.asciidoc b/docs/reference/modules/scripting/expression.asciidoc new file mode 100644 index 00000000000..d941fa2f4a4 --- /dev/null +++ b/docs/reference/modules/scripting/expression.asciidoc @@ -0,0 +1,120 @@ +[[modules-scripting-expression]] +=== Lucene Expressions Language + +Lucene's expressions compile a `javascript` expression to bytecode. They are +designed for high-performance custom ranking and sorting functions and are +enabled for `inline` and `stored` scripting by default. + +[float] +=== Performance + +Expressions were designed to have competitive performance with custom Lucene code. +This performance is due to having low per-document overhead as opposed to other +scripting engines: expressions do more "up-front". + +This allows for very fast execution, even faster than if you had written a `native` script. + +[float] +=== Syntax + +Expressions support a subset of javascript syntax: a single expression. + +See the link:http://lucene.apache.org/core/6_0_0/expressions/index.html?org/apache/lucene/expressions/js/package-summary.html[expressions module documentation] +for details on what operators and functions are available. + +Variables in `expression` scripts are available to access: + +* document fields, e.g. `doc['myfield'].value` +* variables and methods that the field supports, e.g. `doc['myfield'].empty` +* Parameters passed into the script, e.g. `mymodifier` +* The current document's score, `_score` (only available when used in a `script_score`) + +You can use Expressions scripts for `script_score`, `script_fields`, sort scripts, and numeric aggregation +scripts; simply set the `lang` parameter to `expression`.
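As a rough illustration of that last point, here is how a `script_score` request using an expression might look through the Java client of this era. This is a sketch only: the `Script` constructor and `ScriptService.ScriptType` signatures shown are assumptions based on the 2.x-era API, and `my_index`/`popularity` are placeholder names.

[source,java]
----
import java.util.Collections;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;

// Hypothetical usage sketch, not part of this commit: assumes the 2.x-era
// Script(String, ScriptType, String, Map) constructor.
public class ExpressionScoreExample {
    public static SearchResponse boostByPopularity(Client client) {
        Script script = new Script("_score * doc['popularity'].value",
                ScriptType.INLINE, "expression", Collections.emptyMap());
        return client.prepareSearch("my_index")
                .setQuery(QueryBuilders.functionScoreQuery(
                        ScoreFunctionBuilders.scriptFunction(script)))
                .get();
    }
}
----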
+ [float] +=== Numeric field API +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].value` |The value of the field, as a `double` + +|`doc['field_name'].empty` |A boolean indicating if the field has no +values within the doc. + +|`doc['field_name'].min()` |The minimum value of the field in this document. + +|`doc['field_name'].max()` |The maximum value of the field in this document. + +|`doc['field_name'].median()` |The median value of the field in this document. + +|`doc['field_name'].avg()` |The average of the values in this document. + +|`doc['field_name'].sum()` |The sum of the values in this document. + +|`doc['field_name'].count()` |The number of values in this document. +|======================================================================= + +When a document is missing the field completely, by default the value will be treated as `0`. +You can treat it as another value instead, e.g. `doc['myfield'].empty ? 100 : doc['myfield'].value` + +When a document has multiple values for the field, by default the minimum value is returned. +You can choose a different value instead, e.g. `doc['myfield'].sum()`. + +Boolean fields are exposed as numerics, with `true` mapped to `1` and `false` mapped to `0`. +For example: `doc['on_sale'].value ? doc['price'].value * 0.5 : doc['price'].value` + +[float] +=== Date field API +Date fields are treated as the number of milliseconds since January 1, 1970 and +support the Numeric Fields API above, with these additional methods: + +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].getYear()` |Year component, e.g. `1970`. + +|`doc['field_name'].getMonth()` |Month component (0-11), e.g. `0` for January. + +|`doc['field_name'].getDayOfMonth()` |Day component, e.g. `1` for the first of the month. + +|`doc['field_name'].getHourOfDay()` |Hour component (0-23) + +|`doc['field_name'].getMinutes()` |Minutes component (0-59) + +|`doc['field_name'].getSeconds()` |Seconds component (0-59) +|======================================================================= + +The following example shows the difference in years between the `date` fields date0 and date1: + +`doc['date1'].getYear() - doc['date0'].getYear()` + +[float] +=== `geo_point` field API +[cols="<,<",options="header",] +|======================================================================= +|Expression |Description +|`doc['field_name'].empty` |A boolean indicating if the field has no +values within the doc. + +|`doc['field_name'].lat` |The latitude of the geo point, or `null`. + +|`doc['field_name'].lon` |The longitude of the geo point, or `null`. +|======================================================================= + +The following example computes distance in kilometers from Washington, DC: + +`haversin(38.9072, 77.0369, doc['field_name'].lat, doc['field_name'].lon)` + +In this example the coordinates could have been passed as parameters to the script,
+
+[float]
+=== Limitations
+
+There are a few limitations relative to other script languages:
+
+* Only numeric, boolean, date, and geo_point fields may be accessed
+* Stored fields are not available
diff --git a/docs/reference/modules/scripting/fields.asciidoc b/docs/reference/modules/scripting/fields.asciidoc
new file mode 100644
index 00000000000..9ddf8267db6
--- /dev/null
+++ b/docs/reference/modules/scripting/fields.asciidoc
@@ -0,0 +1,232 @@
+[[modules-scripting-fields]]
+=== Accessing document fields and special variables
+
+Depending on where a script is used, it will have access to certain special
+variables and document fields.
+
+[float]
+== Update scripts
+
+A script used in the <>,
+<>, or <>
+API will have access to the `ctx` variable which exposes:
+
+[horizontal]
+`ctx._source`:: Access to the document <>.
+`ctx.op`:: The operation that should be applied to the document: `index` or `delete`.
+`ctx._index` etc:: Access to <>, some of which may be read-only.
+
+[float]
+== Search and Aggregation scripts
+
+With the exception of <>, which are
+executed once per search hit, scripts used in search and aggregations will be
+executed once for every document which might match a query or an aggregation.
+Depending on how many documents you have, this could mean millions or billions
+of executions: these scripts need to be fast!
+
+Field values can be accessed from a script using
+<>, or
+<>, which are explained below.
+
+Scripts may also have access to the document's relevance
+<> and, via the experimental `_index` variable,
+to term statistics for <>.
+
+[[scripting-score]]
+[float]
+=== Accessing the score of a document within a script
+
+Scripts used in the <>,
+in <>, or in
+<> have access to the `_score` variable which
+represents the current relevance score of a document.
+
+Here's an example of using a script in a
+<> to alter the
+relevance `_score` of each document:
+
+[source,js]
+-------------------------------------
+PUT my_index/my_type/1
+{
+  "text": "quick brown fox",
+  "popularity": 1
+}
+
+PUT my_index/my_type/2
+{
+  "text": "quick fox",
+  "popularity": 5
+}
+
+GET my_index/_search
+{
+  "query": {
+    "function_score": {
+      "query": {
+        "match": {
+          "text": "quick brown fox"
+        }
+      },
+      "script_score": {
+        "script": {
+          "lang": "expression",
+          "inline": "_score * doc['popularity']"
+        }
+      }
+    }
+  }
+}
+-------------------------------------
+// AUTOSENSE
+
+
+[float]
+[[modules-scripting-doc-vals]]
+=== Doc Values
+
+By far the fastest and most efficient way to access a field value from a
+script is to use the `doc['field_name']` syntax, which retrieves the field
+value from <>. Doc values are a columnar field value
+store, enabled by default on all fields except for <>.
+
+[source,js]
+-------------------------------
+PUT my_index/my_type/1
+{
+  "cost_price": 100
+}
+
+GET my_index/_search
+{
+  "script_fields": {
+    "sales_price": {
+      "script": {
+        "lang": "expression",
+        "inline": "doc['cost_price'] * (1 + markup)",
+        "params": {
+          "markup": 0.2
+        }
+      }
+    }
+  }
+}
+-------------------------------
+// AUTOSENSE
+
+Doc-values can only return "simple" field values like numbers, dates,
+geo-points, terms, etc, or arrays of these values if the field is
+multi-valued. They cannot return JSON objects.
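+
+For example, a multi-valued numeric field can be reduced to a single value
+inside a script. This is only a sketch; the `prices` field below is
+illustrative:
+
+[source,js]
+-------------------------------
+GET my_index/_search
+{
+  "script_fields": {
+    "cheapest": {
+      "script": {
+        "lang": "expression",
+        "inline": "doc['prices'].min()"
+      }
+    }
+  }
+}
+-------------------------------
+// AUTOSENSE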
+ +[NOTE] +.Doc values and `text` fields +=================================================== + +The `doc['field']` syntax can also be used for <> +if <> is enabled, but *BEWARE*: enabling fielddata on a +`text` field requires loading all of the terms into the JVM heap, which can be +very expensive both in terms of memory and CPU. It seldom makes sense to +access `text` fields from scripts. + +=================================================== + +[float] +[[modules-scripting-stored]] +=== Stored Fields and `_source` + +_Stored fields_ -- fields explicitly marked as +<> -- can be accessed using the +`_fields['field_name'].value` or `_fields['field_name'].values` syntax. + +The document <>, which is really just a +special stored field, can be accessed using the `_source.field_name` syntax. +The `_source` is loaded as a map-of-maps, so properties within object fields +can be accessed as, for example, `_source.name.first`. + +[IMPORTANT] +.Prefer doc-values to stored fields +========================================================= + +Stored fields (which includes the stored `_source` field) are much slower than +doc-values. They are optimised for returning several fields per result, +while doc values are optimised for accessing the value of a specific field in +many documents. + + +It makes sense to use `_source` or stored fields when generating a +<> for the top ten hits from a search +result but, for other search and aggregation use cases, always prefer using +doc values. +========================================================= + + +For instance: + +[source,js] +------------------------------- +PUT my_index +{ + "mappings": { + "my_type": { + "properties": { + "title": { <1> + "type": "text" + }, + "first_name": { + "type": "text", + "store": true + }, + "last_name": { + "type": "text", + "store": true + } + } + } + } +} + +PUT my_index/my_type/1 +{ + "title": "Mr", + "first_name": "Barry", + "last_name": "White" +} + +GET my_index/_search +{ + "script_fields": { + "source": { + "script": { + "lang": "groovy", + "inline": "_source.title + ' ' + _source.first_name + ' ' + _source.last_name" <2> + } + }, + "stored_fields": { + "script": { + "lang": "groovy", + "inline": "_fields['first_name'].value + ' ' + _fields['last_name'].value" + } + } + } +} +------------------------------- +// AUTOSENSE +<1> The `title` field is not stored and so cannot be used with the `_fields[]` syntax. +<2> The `title` field can still be accessed from the `_source`. + +[TIP] +.Stored vs `_source` +======================================================= + +The `_source` field is just a special stored field, so the performance is +similar to that of other stored fields. The `_source` provides access to the +original document body that was indexed (including the ability to distinguish +`null` values from empty fields, single-value arrays from plain scalars, etc). + +The only time it really makes sense to use stored fields instead of the +`_source` field is when the `_source` is very large and it is less costly to +access a few small stored fields instead of the entire `_source`. 
+
+=======================================================
+
diff --git a/docs/reference/modules/scripting/groovy.asciidoc b/docs/reference/modules/scripting/groovy.asciidoc
new file mode 100644
index 00000000000..60b64e0131a
--- /dev/null
+++ b/docs/reference/modules/scripting/groovy.asciidoc
@@ -0,0 +1,181 @@
+[[modules-scripting-groovy]]
+=== Groovy Scripting Language
+
+Groovy is the default scripting language available in Elasticsearch. Although
+limited by the <>, it is not a
+sandboxed language and only `file` scripts may be used by default.
+
+Enabling `inline` or `stored` Groovy scripting is a security risk and should
+only be considered if your Elasticsearch cluster is protected from the outside
+world. Even a simple `while (true) { }` loop could behave as a
+denial-of-service attack on your cluster.
+
+See <> for details
+on security issues with scripts, including how to customize class
+whitelisting.
+
+[float]
+=== Doc value properties and methods
+
+Doc values in Groovy support the following properties and methods (depending
+on the underlying field type):
+
+`doc['field_name'].value`::
+    The native value of the field. For example, if it's a short type, it will be short.
+
+`doc['field_name'].values`::
+    The native array values of the field. For example, if it's a short type,
+    it will be short[]. Remember, a field can have several values within a
+    single doc. Returns an empty array if the field has no values.
+
+`doc['field_name'].empty`::
+    A boolean indicating if the field has no values within the doc.
+
+`doc['field_name'].multiValued`::
+    A boolean indicating that the field has several values within the corpus.
+
+`doc['field_name'].lat`::
+    The latitude of a geo point type, or `null`.
+
+`doc['field_name'].lon`::
+    The longitude of a geo point type, or `null`.
+
+`doc['field_name'].lats`::
+    The latitudes of a geo point type, or an empty array.
+
+`doc['field_name'].lons`::
+    The longitudes of a geo point type, or an empty array.
+
+`doc['field_name'].distance(lat, lon)`::
+    The `plane` distance (in meters) of this geo point field from the provided lat/lon.
+
+`doc['field_name'].distanceWithDefault(lat, lon, default)`::
+    The `plane` distance (in meters) of this geo point field from the provided lat/lon with a default value.
+
+`doc['field_name'].distanceInMiles(lat, lon)`::
+    The `plane` distance (in miles) of this geo point field from the provided lat/lon.
+
+`doc['field_name'].distanceInMilesWithDefault(lat, lon, default)`::
+    The `plane` distance (in miles) of this geo point field from the provided lat/lon with a default value.
+
+`doc['field_name'].distanceInKm(lat, lon)`::
+    The `plane` distance (in km) of this geo point field from the provided lat/lon.
+
+`doc['field_name'].distanceInKmWithDefault(lat, lon, default)`::
+    The `plane` distance (in km) of this geo point field from the provided lat/lon with a default value.
+
+`doc['field_name'].arcDistance(lat, lon)`::
+    The `arc` distance (in meters) of this geo point field from the provided lat/lon.
+
+`doc['field_name'].arcDistanceWithDefault(lat, lon, default)`::
+    The `arc` distance (in meters) of this geo point field from the provided lat/lon with a default value.
+
+`doc['field_name'].arcDistanceInMiles(lat, lon)`::
+    The `arc` distance (in miles) of this geo point field from the provided lat/lon.
+
+`doc['field_name'].arcDistanceInMilesWithDefault(lat, lon, default)`::
+    The `arc` distance (in miles) of this geo point field from the provided lat/lon with a default value.
+ +`doc['field_name'].arcDistanceInKm(lat, lon)`:: + The `arc` distance (in km) of this geo point field from the provided lat/lon. + +`doc['field_name'].arcDistanceInKmWithDefault(lat, lon, default)`:: + The `arc` distance (in km) of this geo point field from the provided lat/lon with a default value. + +`doc['field_name'].factorDistance(lat, lon)`:: + The distance factor of this geo point field from the provided lat/lon. + +`doc['field_name'].factorDistance(lat, lon, default)`:: + The distance factor of this geo point field from the provided lat/lon with a default value. + +`doc['field_name'].geohashDistance(geohash)`:: + The `arc` distance (in meters) of this geo point field from the provided geohash. + +`doc['field_name'].geohashDistanceInKm(geohash)`:: + The `arc` distance (in km) of this geo point field from the provided geohash. + +`doc['field_name'].geohashDistanceInMiles(geohash)`:: + The `arc` distance (in miles) of this geo point field from the provided geohash. + + +[float] +=== Groovy Built In Functions + +There are several built in functions that can be used within scripts. +They include: + +[cols="<,<",options="header",] +|======================================================================= +|Function |Description +|`sin(a)` |Returns the trigonometric sine of an angle. + +|`cos(a)` |Returns the trigonometric cosine of an angle. + +|`tan(a)` |Returns the trigonometric tangent of an angle. + +|`asin(a)` |Returns the arc sine of a value. + +|`acos(a)` |Returns the arc cosine of a value. + +|`atan(a)` |Returns the arc tangent of a value. + +|`toRadians(angdeg)` |Converts an angle measured in degrees to an +approximately equivalent angle measured in radians + +|`toDegrees(angrad)` |Converts an angle measured in radians to an +approximately equivalent angle measured in degrees. + +|`exp(a)` |Returns Euler's number _e_ raised to the power of value. + +|`log(a)` |Returns the natural logarithm (base _e_) of a value. + +|`log10(a)` |Returns the base 10 logarithm of a value. + +|`sqrt(a)` |Returns the correctly rounded positive square root of a +value. + +|`cbrt(a)` |Returns the cube root of a double value. + +|`IEEEremainder(f1, f2)` |Computes the remainder operation on two +arguments as prescribed by the IEEE 754 standard. + +|`ceil(a)` |Returns the smallest (closest to negative infinity) value +that is greater than or equal to the argument and is equal to a +mathematical integer. + +|`floor(a)` |Returns the largest (closest to positive infinity) value +that is less than or equal to the argument and is equal to a +mathematical integer. + +|`rint(a)` |Returns the value that is closest in value to the argument +and is equal to a mathematical integer. + +|`atan2(y, x)` |Returns the angle _theta_ from the conversion of +rectangular coordinates (_x_, _y_) to polar coordinates (r,_theta_). + +|`pow(a, b)` |Returns the value of the first argument raised to the +power of the second argument. + +|`round(a)` |Returns the closest _int_ to the argument. + +|`random()` |Returns a random _double_ value. + +|`abs(a)` |Returns the absolute value of a value. + +|`max(a, b)` |Returns the greater of two values. + +|`min(a, b)` |Returns the smaller of two values. + +|`ulp(d)` |Returns the size of an ulp of the argument. + +|`signum(d)` |Returns the signum function of the argument. + +|`sinh(x)` |Returns the hyperbolic sine of a value. + +|`cosh(x)` |Returns the hyperbolic cosine of a value. + +|`tanh(x)` |Returns the hyperbolic tangent of a value. 
+ +|`hypot(x, y)` |Returns sqrt(_x2_ + _y2_) without intermediate overflow +or underflow. +|======================================================================= diff --git a/docs/reference/modules/scripting/native.asciidoc b/docs/reference/modules/scripting/native.asciidoc new file mode 100644 index 00000000000..1b99673a90f --- /dev/null +++ b/docs/reference/modules/scripting/native.asciidoc @@ -0,0 +1,84 @@ +[[modules-scripting-native]] +=== Native (Java) Scripts + +Sometimes `groovy` and <> aren't enough. For those times you can +implement a native script. + +The best way to implement a native script is to write a plugin and install it. +The plugin {plugins}/plugin-authors.html[documentation] has more information on +how to write a plugin so that Elasticsearch will properly load it. + +To register the actual script you'll need to implement `NativeScriptFactory` +to construct the script. The actual script will extend either +`AbstractExecutableScript` or `AbstractSearchScript`. The second one is likely +the most useful and has several helpful subclasses you can extend like +`AbstractLongSearchScript`, `AbstractDoubleSearchScript`, and +`AbstractFloatSearchScript`. Finally, your plugin should register the native +script by declaring the `onModule(ScriptModule)` method. + +If you squashed the whole thing into one class it'd look like: + +[source,java] +-------------------------------------------------- +public class MyNativeScriptPlugin extends Plugin { + @Override + public String name() { + return "my-native-script"; + } + @Override + public String description() { + return "my native script that does something great"; + } + public void onModule(ScriptModule scriptModule) { + scriptModule.registerScript("my_script", MyNativeScriptFactory.class); + } + + public static class MyNativeScriptFactory implements NativeScriptFactory { + @Override + public ExecutableScript newScript(@Nullable Map params) { + return new MyNativeScript(); + } + @Override + public boolean needsScores() { + return false; + } + } + + public static class MyNativeScript extends AbstractFloatSearchScript { + @Override + public float runAsFloat() { + float a = (float) source().get("a"); + float b = (float) source().get("b"); + return a * b; + } + } +} +-------------------------------------------------- + +You can execute the script by specifying its `lang` as `native`, and the name +of the script as the `id`: + +[source,js] +-------------------------------------------------- +curl -XPOST localhost:9200/_search -d '{ + "query": { + "function_score": { + "query": { + "match": { + "body": "foo" + } + }, + "functions": [ + { + "script_score": { + "script": { + "id": "my_script", + "lang" : "native" + } + } + } + ] + } + } +}' +-------------------------------------------------- diff --git a/docs/reference/modules/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc similarity index 98% rename from docs/reference/modules/painless.asciidoc rename to docs/reference/modules/scripting/painless.asciidoc index e1234777efa..1937bd9947b 100644 --- a/docs/reference/modules/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -1,10 +1,12 @@ [[modules-scripting-painless]] -== Painless Scripting Language +=== Painless Scripting Language experimental[The Painless scripting language is new and is still marked as experimental. The syntax or API may be changed in the future in non-backwards compatible ways if required.] 
-_Painless_ is a simple, secure scripting language built in to Elasticsearch as a module. -It is designed specifically for use with Elasticsearch and can safely be used dynamically. +_Painless_ is a simple, secure scripting language available in Elasticsearch +by default. It is designed specifically for use with Elasticsearch and can +safely be used with `inline` and `stored` scripting, which is enabled by +default. A Painless script is essentially a single function. Painless does not provide support for defining multiple functions within a script. The Painless syntax is similar to diff --git a/docs/reference/modules/scripting/scripting.asciidoc b/docs/reference/modules/scripting/scripting.asciidoc deleted file mode 100644 index fad96978be9..00000000000 --- a/docs/reference/modules/scripting/scripting.asciidoc +++ /dev/null @@ -1,761 +0,0 @@ -[[modules-scripting]] -== Scripting - -The scripting module enables you to use scripts to evaluate custom -expressions. For example, you could use a script to return "script fields" -as part of a search request or evaluate a custom score for a query. - -TIP: Elasticsearch now has a built-in scripting language called _Painless_ -that provides a more secure alternative for implementing -scripts for Elasticsearch. We encourage you to try it out-- -for more information, see <>. - -The default scripting language is http://groovy-lang.org/[groovy] -(http://mvel.codehaus.org/[mvel] was the default in 1.3.x and earlier). - -Additional `lang` plugins enable you to run scripts written in other languages. -Everywhere a script can be used, you can include a `lang` parameter -to specify the language of the script. Plugins are available for following languages: - -[cols="<,<,<",options="header",] -|======================================================================= -|Language |Sandboxed |Required plugin -|groovy |no |built-in -|expression |yes |built-in -|mustache |yes |built-in -|painless |yes |built-in (module) -|javascript |no |{plugins}/lang-javascript.html[elasticsearch-lang-javascript] -|python |no |{plugins}/lang-python.html[elasticsearch-lang-python] -|======================================================================= - -.Groovy dynamic scripting off by default from v1.4.3 -[IMPORTANT] -=================================================== - -Groovy dynamic scripting is off by default. This prevents Groovy scripts -from being accepted as part of a request or retrieved from the -`.scripts` index. You can still use Groovy file scripts stored in -the `config/scripts/` directory on every node. - -To convert an inline script to a file-based script, save the contents -of the `inline` field to a file with the `.groovy` extension and -store it in the `config/scripts` directory on every data node in your -cluster. - -For example, if you have the following inline script: - -[source,js] ------------------------------------ -GET /_search -{ - "script_fields": { - "my_field": { - "inline": "1 + my_var", - "params": { - "my_var": 2 - } - } - } -} ------------------------------------ - -Save `1 + my_var` in a file called `config/scripts/my_script.groovy`. 
- -To use the script in a request, specify its name (without the `.groovy` extension) in the `file` field: - -[source,js] ------------------------------------ -GET /_search -{ - "script_fields": { - "my_field": { - "script": { - "file": "my_script", - "params": { - "my_var": 2 - } - } - } - } -} ------------------------------------ - -=================================================== - -[float] -=== File-based Scripts - -To increase security, Elasticsearch does not allow you to specify scripts for -non-sandboxed languages with a request. Instead, scripts must be placed in the -`scripts` directory inside the configuration directory (the directory where -elasticsearch.yml is). The default location of this `scripts` directory can be -changed by setting `path.scripts` in elasticsearch.yml. Scripts placed into -this directory will automatically be picked up and be available to be used. -Once a script has been placed in this directory, it can be referenced by name. -For example, a script called `calculate-score.groovy` can be referenced in a -request like this: - -[source,sh] --------------------------------------------------- -$ tree config -config -├── elasticsearch.yml -├── logging.yml -└── scripts - └── calculate-score.groovy --------------------------------------------------- - -[source,sh] --------------------------------------------------- -$ cat config/scripts/calculate-score.groovy -log(_score * 2) + my_modifier --------------------------------------------------- - -[source,js] --------------------------------------------------- -curl -XPOST localhost:9200/_search -d '{ - "query": { - "function_score": { - "query": { - "match": { - "body": "foo" - } - }, - "functions": [ - { - "script_score": { - "script": { - "lang": "groovy", - "file": "calculate-score", - "params": { - "my_modifier": 8 - } - } - } - } - ] - } - } -}' --------------------------------------------------- - -The name of the script is derived from the hierarchy of directories it -exists under, and the file name without the lang extension. For example, -a script placed under `config/scripts/group1/group2/test.py` will be -named `group1_group2_test`. - -[float] -[[modules-scripting-stored-scripts]] -=== Stored Scripts -Elasticsearch allows you to store scripts in the cluster state. -There are REST endpoints to manage stored scripts as follows: - -Requests to the scripts endpoint look like : -[source,js] ------------------------------------ -/_scripts/{lang}/{id} ------------------------------------ -Where the `lang` part is the language the script is in and the `id` part is the id -of the script. - -[source,js] ------------------------------------ -curl -XPOST localhost:9200/_scripts/groovy/calculateScore -d '{ - "script": "log(_score * 2) + my_modifier" -}' ------------------------------------ - -This will store the script under the `calculateScore` in the cluster -state. 
- -This script can be accessed at query time by using the `id` and `lang` script parameters: - -[source,js] --------------------------------------------------- -curl -XPOST localhost:9200/_search -d '{ - "query": { - "function_score": { - "query": { - "match": { - "body": "foo" - } - }, - "functions": [ - { - "script_score": { - "script": { - "id": "calculateScore", - "lang" : "groovy", - "params": { - "my_modifier": 8 - } - } - } - } - ] - } - } -}' --------------------------------------------------- - -The script can be viewed by: -[source,js] ------------------------------------ -curl -XGET localhost:9200/_scripts/groovy/calculateScore ------------------------------------ - -This is rendered as: - -[source,js] ------------------------------------ -'{ - "script": "log(_score * 2) + my_modifier" -}' ------------------------------------ - -Stored scripts can be deleted by: -[source,js] ------------------------------------ -curl -XDELETE localhost:9200/_scripts/groovy/calculateScore ------------------------------------ - -NOTE: The size of stored scripts is limited to 65535 bytes. This can be changed by setting `script.max_size_in_bytes` -setting to increase that soft limit, but if scripts are really large then alternatives like native scripts should be considered. - -[float] -[[enable-dynamic-scripting]] -=== Enabling dynamic scripting - -We recommend running Elasticsearch behind an application or proxy, which -protects Elasticsearch from the outside world. If users are allowed to run -inline scripts (even in a search request) or indexed scripts, then they have -the same access to your box as the user that Elasticsearch is running as. For -this reason dynamic scripting is allowed only for sandboxed languages by default. - -First, you should not run Elasticsearch as the `root` user, as this would allow -a script to access or do *anything* on your server, without limitations. Second, -you should not expose Elasticsearch directly to users, but instead have a proxy -application inbetween. If you *do* intend to expose Elasticsearch directly to -your users, then you have to decide whether you trust them enough to run scripts -on your box or not. - -It is possible to enable scripts based on their source, for -every script engine, through the following settings that need to be added to the -`config/elasticsearch.yml` file on every node. - -[source,yaml] ------------------------------------ -script.inline: true -script.stored: true - ------------------------------------ - -While this still allows execution of named scripts provided in the config, or -_native_ Java scripts registered through plugins, it also allows users to run -arbitrary scripts via the API. Instead of sending the name of the file as the -script, the body of the script can be sent instead or retrieved from the -cluster state if previously stored. - -There are three possible configuration values for any of the fine-grained -script settings: - -[cols="<,<",options="header",] -|======================================================================= -|Value |Description -| `false` |scripting is turned off completely, in the context of the setting being set. -| `true` |scripting is turned on, in the context of the setting being set. 
-| `sandbox` |scripts may be executed only for languages that are sandboxed -|======================================================================= - -The default values are the following: - -[source,yaml] ------------------------------------ -script.inline: sandbox -script.stored: sandbox -script.file: true - ------------------------------------ - -NOTE: Global scripting settings affect the `mustache` scripting language. -<> internally use the `mustache` language, -and will still be enabled by default as the `mustache` engine is sandboxed, -but they will be enabled/disabled according to fine-grained settings -specified in `elasticsearch.yml`. - -It is also possible to control which operations can execute scripts. The -supported operations are: - -[cols="<,<",options="header",] -|======================================================================= -|Value |Description -| `aggs` |Aggregations (wherever they may be used) -| `search` |Search api, Percolator api and Suggester api (e.g filters, script_fields) -| `update` |Update api -| `plugin` |Any plugin that makes use of scripts under the generic `plugin` category -|======================================================================= - -Plugins can also define custom operations that they use scripts for instead -of using the generic `plugin` category. Those operations can be referred to -in the following form: `${pluginName}_${operation}`. - -The following example disables scripting for `update` and `mapping` operations, -regardless of the script source, for any engine. Scripts can still be -executed from sandboxed languages as part of `aggregations`, `search` -and plugins execution though, as the above defaults still get applied. - -[source,yaml] ------------------------------------ -script.update: false -script.mapping: false - ------------------------------------ - -Generic settings get applied in order, operation based ones have precedence -over source based ones. Language specific settings are supported too. They -need to be prefixed with the `script.engine.` prefix and have -precedence over any other generic settings. - -[source,yaml] ------------------------------------ -script.engine.groovy.file.aggs: true -script.engine.groovy.file.mapping: true -script.engine.groovy.file.search: true -script.engine.groovy.file.update: true -script.engine.groovy.file.plugin: true -script.engine.groovy.stored.aggs: true -script.engine.groovy.stored.mapping: false -script.engine.groovy.stored.search: true -script.engine.groovy.stored.update: false -script.engine.groovy.stored.plugin: false -script.engine.groovy.inline.aggs: true -script.engine.groovy.inline.mapping: false -script.engine.groovy.inline.search: false -script.engine.groovy.inline.update: false -script.engine.groovy.inline.plugin: false - ------------------------------------ - -[float] -=== Default Scripting Language - -The default scripting language (assuming no `lang` parameter is provided) is -`groovy`. In order to change it, set the `script.default_lang` to the -appropriate language. - -[float] -=== Automatic Script Reloading - -The `config/scripts` directory is scanned periodically for changes. -New and changed scripts are reloaded and deleted script are removed -from preloaded scripts cache. The reload frequency can be specified -using `resource.reload.interval` setting, which defaults to `60s`. -To disable script reloading completely set `script.auto_reload_enabled` -to `false`. 
- -[[native-java-scripts]] -[float] -=== Native (Java) Scripts - -Sometimes `groovy` and `expressions` aren't enough. For those times you can -implement a native script. - -The best way to implement a native script is to write a plugin and install it. -The plugin {plugins}/plugin-authors.html[documentation] has more information on -how to write a plugin so that Elasticsearch will properly load it. - -To register the actual script you'll need to implement `NativeScriptFactory` -to construct the script. The actual script will extend either -`AbstractExecutableScript` or `AbstractSearchScript`. The second one is likely -the most useful and has several helpful subclasses you can extend like -`AbstractLongSearchScript`, `AbstractDoubleSearchScript`, and -`AbstractFloatSearchScript`. Finally, your plugin should register the native -script by declaring the `onModule(ScriptModule)` method. - -If you squashed the whole thing into one class it'd look like: - -[source,java] --------------------------------------------------- -public class MyNativeScriptPlugin extends Plugin { - @Override - public String name() { - return "my-native-script"; - } - @Override - public String description() { - return "my native script that does something great"; - } - public void onModule(ScriptModule scriptModule) { - scriptModule.registerScript("my_script", MyNativeScriptFactory.class); - } - - public static class MyNativeScriptFactory implements NativeScriptFactory { - @Override - public ExecutableScript newScript(@Nullable Map params) { - return new MyNativeScript(); - } - @Override - public boolean needsScores() { - return false; - } - } - - public static class MyNativeScript extends AbstractFloatSearchScript { - @Override - public float runAsFloat() { - float a = (float) source().get("a"); - float b = (float) source().get("b"); - return a * b; - } - } -} --------------------------------------------------- - -You can execute the script by specifying its `lang` as `native`, and the name -of the script as the `id`: - -[source,js] --------------------------------------------------- -curl -XPOST localhost:9200/_search -d '{ - "query": { - "function_score": { - "query": { - "match": { - "body": "foo" - } - }, - "functions": [ - { - "script_score": { - "script": { - "id": "my_script", - "lang" : "native" - } - } - } - ] - } - } -}' --------------------------------------------------- - - -[float] -=== Lucene Expressions Scripts - -experimental[The Lucene expressions module is undergoing significant development and the exposed functionality is likely to change in the future] - -Lucene's expressions module provides a mechanism to compile a -`javascript` expression to bytecode. This allows very fast execution, -as if you had written a `native` script. Expression scripts can be -used in `script_score`, `script_fields`, sort scripts and numeric aggregation scripts. - -See the link:http://lucene.apache.org/core/4_9_0/expressions/index.html?org/apache/lucene/expressions/js/package-summary.html[expressions module documentation] -for details on what operators and functions are available. - -Variables in `expression` scripts are available to access: - -* document fields, e.g. `doc['myfield'].value` -* variables and methods that the field supports, e.g. `doc['myfield'].empty` -* Parameters passed into the script, e.g. 
`mymodifier` -* The current document's score, `_score` (only available when used in a `script_score`) - -[float] -=== Expressions API for numeric fields -[cols="<,<",options="header",] -|======================================================================= -|Expression |Description -|`doc['field_name'].value` |The native value of the field. For example, -if its a short type, it will be short. - -|`doc['field_name'].empty` |A boolean indicating if the field has no -values within the doc. - -|`doc['field_name'].min()` |The minimum value of the field in this document. - -|`doc['field_name'].max()` |The maximum value of the field in this document. - -|`doc['field_name'].median()` |The median value of the field in this document. - -|`doc['field_name'].avg()` |The average of the values in this document. - -|`doc['field_name'].sum()` |The sum of the values in this document. - -|`doc['field_name'].count()` |The number of values in this document. -|======================================================================= - -When a document is missing the field completely, by default the value will be treated as `0`. -You can treat it as another value instead, e.g. `doc['myfield'].empty ? 100 : doc['myfield'].value` - -When a document has multiple values for the field, by default the minimum value is returned. -You can choose a different value instead, e.g. `doc['myfield'].sum()`. - -When a document is missing the field completely, by default the value will be treated as `0`. - -Boolean fields are exposed as numerics, with `true` mapped to `1` and `false` mapped to `0`. -For example: `doc['on_sale'] ? doc['price'] * 0.5 : doc['price']` - -[float] -=== Additional methods for date fields -Date fields are treated as the number of milliseconds since January 1, 1970 and -support the numeric API above, with these additional methods: - -[cols="<,<",options="header",] -|======================================================================= -|Expression |Description -|`doc['field_name'].getYear()` |Year component, e.g. `1970`. - -|`doc['field_name'].getMonth()` |Month component (0-11), e.g. `0` for January. - -|`doc['field_name'].getDayOfMonth()` |Day component, e.g. `1` for the first of the month. - -|`doc['field_name'].getHourOfDay()` |Hour component (0-23) - -|`doc['field_name'].getMinutes()` |Minutes component (0-59) - -|`doc['field_name'].getSeconds()` |Seconds component (0-59) -|======================================================================= - -The following example shows the difference in years between the `date` fields date0 and date1: - -`doc['date1'].getYear() - doc['date0'].getYear()` - -[float] -=== Expressions API for `geo_point` fields -[cols="<,<",options="header",] -|======================================================================= -|Expression |Description -|`doc['field_name'].empty` |A boolean indicating if the field has no -values within the doc. - -|`doc['field_name'].lat` |The latitude of the geo point. - -|`doc['field_name'].lon` |The longitude of the geo point. -|======================================================================= - -The following example computes distance in kilometers from Washington, DC: - -`haversin(38.9072, 77.0369, doc['field_name'].lat, doc['field_name'].lon)` - -In this example the coordinates could have been passed as parameters to the script, -e.g. based on geolocation of the user. 
- -[float] -=== Expressions limitations - -There are a few limitations relative to other script languages: - -* Only numeric, boolean, date, and geo_point fields may be accessed -* Stored fields are not available - -[float] -=== Score - -In all scripts that can be used in aggregations, the current -document's score is accessible in `_score`. - -[float] -=== Computing scores based on terms in scripts - -see <> - -[float] -=== Document Fields - -Most scripting revolve around the use of specific document fields data. -The `doc['field_name']` can be used to access specific field data within -a document (the document in question is usually derived by the context -the script is used). Document fields are very fast to access since they -end up being loaded into memory (all the relevant field values/tokens -are loaded to memory). Note, however, that the `doc[...]` notation only -allows for simple valued fields (can’t return a json object from it) -and makes sense only on non-analyzed or single term based fields. - -The following data can be extracted from a field: - -[cols="<,<",options="header",] -|======================================================================= -|Expression |Description -|`doc['field_name'].value` |The native value of the field. For example, -if its a short type, it will be short. - -|`doc['field_name'].values` |The native array values of the field. For -example, if its a short type, it will be short[]. Remember, a field can -have several values within a single doc. Returns an empty array if the -field has no values. - -|`doc['field_name'].empty` |A boolean indicating if the field has no -values within the doc. - -|`doc['field_name'].multiValued` |A boolean indicating that the field -has several values within the corpus. - -|`doc['field_name'].lat` |The latitude of a geo point type. - -|`doc['field_name'].lon` |The longitude of a geo point type. - -|`doc['field_name'].lats` |The latitudes of a geo point type. - -|`doc['field_name'].lons` |The longitudes of a geo point type. - -|`doc['field_name'].distance(lat, lon)` |The `plane` distance (in meters) -of this geo point field from the provided lat/lon. - -|`doc['field_name'].distanceWithDefault(lat, lon, default)` |The `plane` distance (in meters) -of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].distanceInMiles(lat, lon)` |The `plane` distance (in -miles) of this geo point field from the provided lat/lon. - -|`doc['field_name'].distanceInMilesWithDefault(lat, lon, default)` |The `plane` distance (in -miles) of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].distanceInKm(lat, lon)` |The `plane` distance (in -km) of this geo point field from the provided lat/lon. - -|`doc['field_name'].distanceInKmWithDefault(lat, lon, default)` |The `plane` distance (in -km) of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].arcDistance(lat, lon)` |The `arc` distance (in -meters) of this geo point field from the provided lat/lon. - -|`doc['field_name'].arcDistanceWithDefault(lat, lon, default)` |The `arc` distance (in -meters) of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].arcDistanceInMiles(lat, lon)` |The `arc` distance (in -miles) of this geo point field from the provided lat/lon. - -|`doc['field_name'].arcDistanceInMilesWithDefault(lat, lon, default)` |The `arc` distance (in -miles) of this geo point field from the provided lat/lon with a default value. 
- -|`doc['field_name'].arcDistanceInKm(lat, lon)` |The `arc` distance (in -km) of this geo point field from the provided lat/lon. - -|`doc['field_name'].arcDistanceInKmWithDefault(lat, lon, default)` |The `arc` distance (in -km) of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].factorDistance(lat, lon)` |The distance factor of this geo point field from the provided lat/lon. - -|`doc['field_name'].factorDistance(lat, lon, default)` |The distance factor of this geo point field from the provided lat/lon with a default value. - -|`doc['field_name'].geohashDistance(geohash)` |The `arc` distance (in meters) -of this geo point field from the provided geohash. - -|`doc['field_name'].geohashDistanceInKm(geohash)` |The `arc` distance (in km) -of this geo point field from the provided geohash. - -|`doc['field_name'].geohashDistanceInMiles(geohash)` |The `arc` distance (in -miles) of this geo point field from the provided geohash. -|======================================================================= - -[float] -=== Stored Fields - -Stored fields can also be accessed when executing a script. Note, they -are much slower to access compared with document fields, as they are not -loaded into memory. They can be simply accessed using -`_fields['my_field_name'].value` or `_fields['my_field_name'].values`. - -[float] -=== Accessing the score of a document within a script - -When using scripting for calculating the score of a document (for instance, with -the `function_score` query), you can access the score using the `_score` -variable inside of a Groovy script. - -[float] -=== Source Field - -The source field can also be accessed when executing a script. The -source field is loaded per doc, parsed, and then provided to the script -for evaluation. The `_source` forms the context under which the source -field can be accessed, for example `_source.obj2.obj1.field3`. - -Accessing `_source` is much slower compared to using `doc` -but the data is not loaded into memory. For a single field access `_fields` may be -faster than using `_source` due to the extra overhead of potentially parsing large documents. -However, `_source` may be faster if you access multiple fields or if the source has already been -loaded for other purposes. - - -[float] -=== Groovy Built In Functions - -There are several built in functions that can be used within scripts. -They include: - -[cols="<,<",options="header",] -|======================================================================= -|Function |Description -|`sin(a)` |Returns the trigonometric sine of an angle. - -|`cos(a)` |Returns the trigonometric cosine of an angle. - -|`tan(a)` |Returns the trigonometric tangent of an angle. - -|`asin(a)` |Returns the arc sine of a value. - -|`acos(a)` |Returns the arc cosine of a value. - -|`atan(a)` |Returns the arc tangent of a value. - -|`toRadians(angdeg)` |Converts an angle measured in degrees to an -approximately equivalent angle measured in radians - -|`toDegrees(angrad)` |Converts an angle measured in radians to an -approximately equivalent angle measured in degrees. - -|`exp(a)` |Returns Euler's number _e_ raised to the power of value. - -|`log(a)` |Returns the natural logarithm (base _e_) of a value. - -|`log10(a)` |Returns the base 10 logarithm of a value. - -|`sqrt(a)` |Returns the correctly rounded positive square root of a -value. - -|`cbrt(a)` |Returns the cube root of a double value. 
-
-|`IEEEremainder(f1, f2)` |Computes the remainder operation on two
-arguments as prescribed by the IEEE 754 standard.
-
-|`ceil(a)` |Returns the smallest (closest to negative infinity) value
-that is greater than or equal to the argument and is equal to a
-mathematical integer.
-
-|`floor(a)` |Returns the largest (closest to positive infinity) value
-that is less than or equal to the argument and is equal to a
-mathematical integer.
-
-|`rint(a)` |Returns the value that is closest in value to the argument
-and is equal to a mathematical integer.
-
-|`atan2(y, x)` |Returns the angle _theta_ from the conversion of
-rectangular coordinates (_x_, _y_) to polar coordinates (r,_theta_).
-
-|`pow(a, b)` |Returns the value of the first argument raised to the
-power of the second argument.
-
-|`round(a)` |Returns the closest _int_ to the argument.
-
-|`random()` |Returns a random _double_ value.
-
-|`abs(a)` |Returns the absolute value of a value.
-
-|`max(a, b)` |Returns the greater of two values.
-
-|`min(a, b)` |Returns the smaller of two values.
-
-|`ulp(d)` |Returns the size of an ulp of the argument.
-
-|`signum(d)` |Returns the signum function of the argument.
-
-|`sinh(x)` |Returns the hyperbolic sine of a value.
-
-|`cosh(x)` |Returns the hyperbolic cosine of a value.
-
-|`tanh(x)` |Returns the hyperbolic tangent of a value.
-
-|`hypot(x, y)` |Returns sqrt(_x2_ + _y2_) without intermediate overflow
-or underflow.
-|=======================================================================
diff --git a/docs/reference/modules/scripting/security.asciidoc b/docs/reference/modules/scripting/security.asciidoc
index 3c9144984d5..c5ad89538e3 100644
--- a/docs/reference/modules/scripting/security.asciidoc
+++ b/docs/reference/modules/scripting/security.asciidoc
@@ -1,5 +1,139 @@
 [[modules-scripting-security]]
-=== Scripting and the Java Security Manager
+=== Scripting and security
+
+You should never run Elasticsearch as the `root` user, as this would allow a
+script to access or do *anything* on your server, without limitations.
+
+You should not expose Elasticsearch directly to users, but instead have a
+proxy application in between. If you *do* intend to expose Elasticsearch
+directly to your users, then you have to decide whether you trust them enough
+to run scripts on your box or not, and apply the appropriate safety measures.
+
+[[enable-dynamic-scripting]]
+[float]
+=== Enabling dynamic scripting
+
+The `script.*` settings allow for <>
+control of which script languages (e.g. `groovy`, `painless`) are allowed to
+run in which context (e.g. `search`, `aggs`, `update`), and where the script
+source is allowed to come from (i.e. `inline`, `stored`, `file`).
+
+For instance, the following setting enables `inline` `update` scripts for
+`groovy`:
+
+[source,yaml]
+----------------
+script.engine.groovy.inline.update: true
+----------------
+
+Less fine-grained settings exist which allow you to enable or disable scripts
+for all sources, all languages, or all contexts. The following settings
+enable `inline` and `stored` scripts for all languages in all contexts:
+
+[source,yaml]
+-----------------------------------
+script.inline: true
+script.stored: true
+-----------------------------------
+
+WARNING: The above settings mean that anybody who can send requests to your
+Elasticsearch instance can run whatever scripts they choose! This is a
+security risk and may well lead to your Elasticsearch cluster being
+compromised.
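+
+If you do need dynamic scripting, prefer enabling it as narrowly as possible.
+As a sketch (the language and context below are chosen purely for
+illustration), the following enables `stored` `groovy` scripts for
+aggregations only, instead of opening up every language in every context:
+
+[source,yaml]
+-----------------------------------
+script.engine.groovy.stored.aggs: true
+-----------------------------------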
+
+[[security-script-source]]
+[float]
+=== Script source settings
+
+Scripts may be enabled or disabled depending on their source: `inline`,
+`stored` in the cluster state, or from a `file` on each node in the cluster.
+Each of these settings takes one of these values:
+
+[horizontal]
+`false`:: Scripting is disabled.
+`true`:: Scripting is enabled.
+`sandbox`:: Scripting is enabled only for sandboxed languages.
+
+The default values are the following:
+
+[source,yaml]
+-----------------------------------
+script.inline: sandbox
+script.stored: sandbox
+script.file: true
+-----------------------------------
+
+NOTE: Global scripting settings affect the `mustache` scripting language.
+<> internally use the `mustache` language,
+and will still be enabled by default as the `mustache` engine is sandboxed,
+but they will be enabled/disabled according to fine-grained settings
+specified in `elasticsearch.yml`.
+
+[[security-script-context]]
+[float]
+=== Script context settings
+
+Scripting may also be enabled or disabled in different contexts in the
+Elasticsearch API. The supported contexts are:
+
+[horizontal]
+`aggs`:: Aggregations
+`search`:: Search API, Percolator API and Suggester API
+`update`:: Update API
+`plugin`:: Any plugin that makes use of scripts under the generic `plugin` category
+
+Plugins can also define custom operations that they use scripts for instead
+of using the generic `plugin` category. Those operations can be referred to
+in the following form: `${pluginName}_${operation}`.
+
+The following example disables scripting for `update` and `plugin` operations,
+regardless of the script source or language. Scripts can still be executed
+from sandboxed languages as part of `aggregations`, `search` and plugins
+execution though, as the above defaults still get applied.
+
+[source,yaml]
+-----------------------------------
+script.update: false
+script.plugin: false
+-----------------------------------
+
+[[security-script-fine]]
+[float]
+=== Fine-grained script settings
+
+First, the high-level script settings described above are applied in order
+(context settings have precedence over source settings). Then, fine-grained
+settings which include the script language take precedence over any high-level
+settings.
+
+Fine-grained settings have the form:
+
+[source,yaml]
+------------------------
+script.engine.{lang}.{source}.{context}: true|false
+------------------------
+
+For example:
+
+[source,yaml]
+-----------------------------------
+script.inline: false <1>
+script.stored: false <1>
+script.file: false <1>
+
+script.engine.groovy.stored.search: true <2>
+script.engine.groovy.stored.aggs: true <2>
+
+script.engine.mustache.stored.search: true <3>
+-----------------------------------
+<1> Disable all scripting from any source.
+<2> Allow stored Groovy scripts to be used for search and aggregations.
+<3> Allow stored Mustache templates to be used for search.
+
+[[java-security-manager]]
+[float]
+=== Java Security Manager
 
 Elasticsearch runs with the https://docs.oracle.com/javase/tutorial/essential/environment/security.html[Java
 Security Manager] enabled by default.
The security policy in Elasticsearch locks down the
diff --git a/docs/reference/modules/scripting/using.asciidoc b/docs/reference/modules/scripting/using.asciidoc
new file mode 100644
index 00000000000..71eba9386e3
--- /dev/null
+++ b/docs/reference/modules/scripting/using.asciidoc
@@ -0,0 +1,238 @@
+[[modules-scripting-using]]
+=== How to use scripts
+
+Wherever scripting is supported in the Elasticsearch API, the syntax follows
+the same pattern:
+
+[source,js]
+-------------------------------------
+  "script": {
+    "lang": "...",  <1>
+    "inline" | "id" | "file": "...", <2>
+    "params": { ... } <3>
+  }
+-------------------------------------
+<1> The language the script is written in, which defaults to `groovy`.
+<2> The script itself, which may be specified as `inline`, `id`, or `file`.
+<3> Any named parameters that should be passed into the script.
+
+For example, the following script is used in a search request to return a
+<>:
+
+[source,js]
+-------------------------------------
+PUT my_index/my_type/1
+{
+  "my_field": 5
+}
+
+GET my_index/_search
+{
+  "script_fields": {
+    "my_doubled_field": {
+      "script": {
+        "lang": "expression",
+        "inline": "doc['my_field'] * multiplier",
+        "params": {
+          "multiplier": 2
+        }
+      }
+    }
+  }
+}
+-------------------------------------
+// AUTOSENSE
+
+
+[float]
+=== Script Parameters
+
+`lang`::
+
+    Specifies the language the script is written in. Defaults to `groovy` but
+    may be set to any of the languages listed in <>. The
+    default language may be changed in the `elasticsearch.yml` config file by
+    setting `script.default_lang` to the appropriate language.
+
+
+`inline`, `id`, `file`::
+
+    Specifies the source of the script. An `inline` script is specified
+    `inline` as in the example above, a stored script with the specified `id`
+    is retrieved from the cluster state (see <>),
+    and a `file` script is retrieved from a file in the `config/scripts`
+    directory (see <>).
++
+While languages like `expression` and `painless` can be used out of the box as
+inline or stored scripts, other languages like `groovy` can only be
+specified as `file` unless you first adjust the default
+<>.
+
+`params`::
+
+    Specifies any named parameters that are passed into the script as
+    variables.
+
+[IMPORTANT]
+.Prefer parameters
+========================================
+
+The first time Elasticsearch sees a new script, it compiles it and stores the
+compiled version in a cache. Compilation can be a heavy process.
+
+If you need to pass variables into the script, you should pass them in as
+named `params` instead of hard-coding values into the script itself. For
+example, if you want to be able to multiply a field value by different
+multipliers, don't hard-code the multiplier into the script:
+
+[source,js]
+----------------------
+  "inline": "doc['my_field'] * 2"
+----------------------
+
+Instead, pass it in as a named parameter:
+
+[source,js]
+----------------------
+  "inline": "doc['my_field'] * multiplier",
+  "params": {
+    "multiplier": 2
+  }
+----------------------
+
+The first version has to be recompiled every time the multiplier changes. The
+second version is only compiled once.
+
+========================================
+
+
+[float]
+[[modules-scripting-file-scripts]]
+=== File-based Scripts
+
+To increase security, non-sandboxed languages can only be specified in script
+files stored on every node in the cluster.
File scripts must be saved in the
+`scripts` directory whose default location depends on whether you use the
+<> (`$ES_HOME/config/scripts/`),
+<>, or <> package. The default may be
+changed with the `path.scripts` setting.
+
+Any files placed in the `scripts` directory will be compiled automatically
+when the node starts up and then <>.
+
+The file should be named as follows: `{script-name}.{lang}`. For instance,
+the following example creates a Groovy script called `calculate-score`:
+
+[source,sh]
+--------------------------------------------------
+echo "log(_score * 2) + my_modifier" > config/scripts/calculate-score.groovy
+--------------------------------------------------
+
+This script can be used as follows:
+
+[source,js]
+--------------------------------------------------
+GET my_index/_search
+{
+  "query": {
+    "script": {
+      "script": {
+        "lang": "groovy", <1>
+        "file": "calculate-score", <2>
+        "params": {
+          "my_modifier": 2
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+<1> The language of the script, which should correspond with the script file suffix.
+<2> The name of the script, which should be the name of the file.
+
+The `scripts` directory may contain sub-directories, in which case the
+hierarchy of directories is flattened and concatenated with underscores. A
+script in `group1/group2/my_script.groovy` should use `group1_group2_my_script`
+as the `file` name.
+
+
+[[reload-scripts]]
+[float]
+==== Automatic script reloading
+
+The `scripts` directory will be rescanned every `60s` (configurable with the
+`resource.reload.interval` setting) and new, changed, or removed scripts will
+be compiled, updated, or deleted from the script cache.
+
+Script reloading can be completely disabled by setting
+`script.auto_reload_enabled` to `false`.
+
+[float]
+[[modules-scripting-stored-scripts]]
+=== Stored Scripts
+
+Scripts may be stored in and retrieved from the cluster state using the
+`_scripts` end-point:
+
+[source,js]
+-----------------------------------
+/_scripts/{lang}/{id} <1> <2>
+-----------------------------------
+<1> The `lang` represents the script language.
+<2> The `id` is a unique identifier or script name.
+
+This example stores a Groovy script called `calculate-score` in the cluster
+state:
+
+[source,js]
+-----------------------------------
+POST /_scripts/groovy/calculate-score
+{
+  "script": "log(_score * 2) + my_modifier"
+}
+-----------------------------------
+// AUTOSENSE
+
+
+This same script can be retrieved with:
+
+[source,js]
+-----------------------------------
+GET /_scripts/groovy/calculate-score
+-----------------------------------
+// AUTOSENSE
+
+or deleted with:
+
+[source,js]
+-----------------------------------
+DELETE /_scripts/groovy/calculate-score
+-----------------------------------
+// AUTOSENSE
+
+Stored scripts can be used by specifying the `lang` and `id` parameters as follows:
+
+[source,js]
+--------------------------------------------------
+GET my_index/_search
+{
+  "query": {
+    "script": {
+      "script": {
+        "lang": "groovy",
+        "id": "calculate-score",
+        "params": {
+          "my_modifier": 2
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+
+
+NOTE: The size of stored scripts is limited to 65,535 bytes. This can be
+changed by increasing the `script.max_size_in_bytes` setting to raise that soft
+limit, but if scripts are really large then alternatives like
+<> scripts should be considered instead.
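+
+As a sketch, the limit could be raised in `elasticsearch.yml` like this (the
+value shown is arbitrary):
+
+[source,yaml]
+-----------------------------------
+script.max_size_in_bytes: 131072
+-----------------------------------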
+

From 4c6cf7ee88bc75a161e3752014071d4f9ab8d8d5 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Wed, 4 May 2016 12:48:39 -0400
Subject: [PATCH 0071/1311] Pass ES_JAVA_OPTS to JVM for plugins script

This commit adds support for ES_JAVA_OPTS to the elasticsearch-plugin
script.

Relates #18140
---
 .../src/main/resources/bin/elasticsearch-plugin     |  2 +-
 .../src/main/resources/bin/elasticsearch-plugin.bat |  2 +-
 .../scripts/module_and_plugin_test_cases.bash       | 12 ++++++++++++
 3 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin
index f686ed054f2..2730a562907 100755
--- a/distribution/src/main/resources/bin/elasticsearch-plugin
+++ b/distribution/src/main/resources/bin/elasticsearch-plugin
@@ -110,4 +110,4 @@ fi
 HOSTNAME=`hostname | cut -d. -f1`
 export HOSTNAME
 
-eval "\"$JAVA\"" -client -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginCli $args
+eval "\"$JAVA\"" "$ES_JAVA_OPTS" -client -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginCli $args
diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin.bat b/distribution/src/main/resources/bin/elasticsearch-plugin.bat
index 6c6be019fc6..c4286aa7b1c 100644
--- a/distribution/src/main/resources/bin/elasticsearch-plugin.bat
+++ b/distribution/src/main/resources/bin/elasticsearch-plugin.bat
@@ -48,7 +48,7 @@ GOTO loop
 
 SET HOSTNAME=%COMPUTERNAME%
 
-"%JAVA_HOME%\bin\java" -client -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args!
+"%JAVA_HOME%\bin\java" %ES_JAVA_OPTS% -client -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args!
 goto finally
diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash
index 382492e245e..85d829718e2 100644
--- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash
+++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash
@@ -476,3 +476,15 @@ fi
     # restore JAVA_HOME
     export JAVA_HOME=$java_home
 }
+
+@test "[$GROUP] test ES_JAVA_OPTS" {
+    # preserve ES_JAVA_OPTS
+    local es_java_opts=$ES_JAVA_OPTS
+
+    export ES_JAVA_OPTS="-XX:+PrintFlagsFinal"
+    # this will fail if ES_JAVA_OPTS is not passed through
+    "$ESHOME/bin/elasticsearch-plugin" list | grep MaxHeapSize
+
+    # restore ES_JAVA_OPTS
+    export ES_JAVA_OPTS=$es_java_opts
+}

From 7656d7ea7396cb291d913411724add972b5f01d1 Mon Sep 17 00:00:00 2001
From: Robert Muir
Date: Wed, 4 May 2016 12:49:16 -0400
Subject: [PATCH 0072/1311] docs: remove null from expressions case.

Expressions don't have nulls, only doubles. If the field is missing, then it's
treated as 0.0. You can query .empty to see if it's missing and substitute
something else.
See https://github.com/elastic/elasticsearch/pull/18132#discussion_r62068494 --- docs/reference/modules/scripting/expression.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/modules/scripting/expression.asciidoc b/docs/reference/modules/scripting/expression.asciidoc index d941fa2f4a4..61e28e05298 100644 --- a/docs/reference/modules/scripting/expression.asciidoc +++ b/docs/reference/modules/scripting/expression.asciidoc @@ -99,9 +99,9 @@ The following example shows the difference in years between the `date` fields da |`doc['field_name'].empty` |A boolean indicating if the field has no values within the doc. -|`doc['field_name'].lat` |The latitude of the geo point, or `null`. +|`doc['field_name'].lat` |The latitude of the geo point. -|`doc['field_name'].lon` |The longitude of the geo point, or `null`. +|`doc['field_name'].lon` |The longitude of the geo point. |======================================================================= The following example computes distance in kilometers from Washington, DC: From 124e8e5a6dd3d23cf1182bec63063670e915152c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 4 May 2016 14:11:29 -0400 Subject: [PATCH 0073/1311] Remove client option for JVM for plugins script Today we specify the client option for the JVM when executing plugin commands. Yet, this option does nothing on a 64-bit capable JDK as such JDKs always select the HotSpot server VM. And for 32-bit JDKs, running plugin commands with the server VM is okay. Thus, we should remove this unnecessary flag and let the default VM be selected. Relates #18142 --- distribution/src/main/resources/bin/elasticsearch-plugin | 2 +- distribution/src/main/resources/bin/elasticsearch-plugin.bat | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin index 2730a562907..eec162dab94 100755 --- a/distribution/src/main/resources/bin/elasticsearch-plugin +++ b/distribution/src/main/resources/bin/elasticsearch-plugin @@ -110,4 +110,4 @@ fi HOSTNAME=`hostname | cut -d. -f1` export HOSTNAME -eval "\"$JAVA\"" "$ES_JAVA_OPTS" -client -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginCli $args +eval "\"$JAVA\"" "$ES_JAVA_OPTS" -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginCli $args diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin.bat b/distribution/src/main/resources/bin/elasticsearch-plugin.bat index c4286aa7b1c..5604b57dbef 100644 --- a/distribution/src/main/resources/bin/elasticsearch-plugin.bat +++ b/distribution/src/main/resources/bin/elasticsearch-plugin.bat @@ -48,7 +48,7 @@ GOTO loop SET HOSTNAME=%COMPUTERNAME% -"%JAVA_HOME%\bin\java" %ES_JAVA_OPTS% -client -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! +"%JAVA_HOME%\bin\java" %ES_JAVA_OPTS% -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.plugins.PluginCli" !args! goto finally From 67c0734bf3d1f6748be56e83c292ad4910339c38 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Wed, 4 May 2016 15:21:47 -0400 Subject: [PATCH 0074/1311] Update misc.asciidoc Added documentation for the cluster.indices.tombstones.size property for maximum tombstones in the cluster state.
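As a sketch of how that property might be set (the value is illustrative; as the documentation below notes, the setting cannot be updated dynamically, so it belongs in `elasticsearch.yml`):

[source,yaml]
--------------------------------------------------
# Keep the last 1000 index deletes in the cluster state instead of the default 500.
cluster.indices.tombstones.size: 1000
--------------------------------------------------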
--- docs/reference/modules/cluster/misc.asciidoc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc index b6c50f36f92..57b6e8ea5c9 100644 --- a/docs/reference/modules/cluster/misc.asciidoc +++ b/docs/reference/modules/cluster/misc.asciidoc @@ -17,6 +17,23 @@ user with access to the <> API can make the cluster read-write again. +[[cluster-max-tombstones]] +==== Index Tombstones + +The cluster state maintains index tombstones to explicitly denote indices that +have been deleted. The number of tombstones maintained in the cluster state is +controlled by the following property, which cannot be updated dynamically: + +`cluster.indices.tombstones.size`:: + +Index tombstones prevent nodes that are not part of the cluster when a delete +occurs from joining the cluster and reimporting the index as though the delete +was never issued. To keep the cluster state from growing huge, we only keep the +last `cluster.indices.tombstones.size` deletes, which defaults to 500. You can +increase it if you expect nodes to be absent from the cluster and miss more +than 500 deletes. We think that is rare, thus the default. Tombstones don't take +up much space, but we also think that a number like 50,000 is probably too big. + [[cluster-logger]] ==== Logger From 230697c20220937ae5ffed4de29af73b5124d56d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 27 Apr 2016 18:10:57 -0400 Subject: [PATCH 0075/1311] [reindex] Switch throttle to Float.POSITIVE_INFINITY/"unlimited" All other values are errors. Add a Java test for throttling. We had a REST test, but it only ran against one node, so it didn't catch serialization errors. Add a simple round-trip test for the rethrottle request --- docs/reference/docs/reindex.asciidoc | 47 +++++++- docs/reference/docs/update-by-query.asciidoc | 51 ++++++++- .../AbstractAsyncBulkByScrollAction.java | 2 +- .../AbstractBaseReindexRestHandler.java | 23 ++-- .../reindex/AbstractBulkByScrollRequest.java | 20 +++- .../AbstractBulkByScrollRequestBuilder.java | 10 ++ ...stractBulkIndexByScrollRequestBuilder.java | 3 +- .../index/reindex/BulkByScrollTask.java | 5 +- .../index/reindex/RethrottleRequest.java | 35 +++++- .../reindex/RethrottleRequestBuilder.java | 4 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../index/reindex/BulkByScrollTaskTests.java | 12 +- .../index/reindex/CancelTestUtils.java | 5 +- .../index/reindex/ReindexBasicTests.java | 12 +- .../index/reindex/ReindexCancelTests.java | 2 +- .../index/reindex/ReindexFailureTests.java | 4 +- .../reindex/ReindexParentChildTests.java | 8 +- .../index/reindex/ReindexTestCase.java | 24 +++- .../index/reindex/ReindexVersioningTests.java | 18 +-- .../index/reindex/RethrottleTests.java | 64 +++++++++++ .../index/reindex/RoundTripTests.java | 22 +++- .../reindex/UpdateByQueryBasicTests.java | 14 ++- .../reindex/UpdateByQueryCancelTests.java | 6 +- .../index/reindex/UpdateByQueryTestCase.java | 52 --------- .../UpdateByQueryWhileModifyingTests.java | 7 +- .../test/reindex/20_validation.yaml | 26 ++++- .../test/reindex/80_throttle.yaml | 108 ++++++++---------- .../test/update_by_query/20_validation.yaml | 38 +++--- .../test/update_by_query/70_throttle.yaml | 88 ++++++-------- .../rest-api-spec/api/reindex.rethrottle.json | 2 +- 30 files changed, 458 insertions(+), 256 deletions(-) create mode 100644 modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java delete mode 100644
modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryTestCase.java diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 488d3c53b89..8742e52c5c8 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -342,14 +342,15 @@ request. `timeout` controls how long each write request waits for unavailable shards to become available. Both work exactly how they work in the <>. -`requests_per_second` can be set to any decimal number (1.4, 6, 1000, etc) and -throttle the number of requests per second that the reindex issues. The +`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) +and throttles the number of requests per second that the reindex issues. The throttling is done waiting between bulk batches so that it can manipulate the scroll timeout. The wait time is the difference between the time it took the batch to complete and the time `requests_in_the_batch / requests_per_second`. Since the batch isn't broken into multiple bulk requests, large batch sizes will cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". +starting the next set. This is "bursty" instead of "smooth". The default is +`unlimited`, which is also the only non-number value that it accepts. [float] === Response body @@ -464,6 +465,46 @@ progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field. +[float] +[[docs-reindex-cancel-task-api]] +=== Works with the Cancel Task API + +Any Reindex can be canceled using the <>: + +[source,js] +-------------------------------------------------- +POST /_tasks/{task_id}/_cancel +-------------------------------------------------- +// AUTOSENSE + +The `task_id` can be found using the tasks API above. + +Cancellation should happen quickly but might take a few seconds. The task status +API above will continue to list the task until it wakes to cancel itself. + + +[float] +[[docs-reindex-rethrottle]] +=== Rethrottling + +The value of `requests_per_second` can be changed on a running reindex using +the `_rethrottle` API: + +[source,js] +-------------------------------------------------- +POST /_reindex/{task_id}/_rethrottle?requests_per_second=unlimited +-------------------------------------------------- +// AUTOSENSE + +The `task_id` can be found using the tasks API above. + +Just like when setting it on the `_reindex` API, `requests_per_second` can be +either `unlimited` to disable throttling or any decimal number like `1.7` or +`12` to throttle to that level. Rethrottling that speeds up the query takes +effect immediately, but rethrottling that slows down the query will take effect +after completing the current batch. This prevents scroll timeouts. + + [float] === Reindex to change the name of a field diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 84bd61f3e9b..e307d53e8f4 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -172,14 +172,15 @@ request. `timeout` controls how long each write request waits for unavailable shards to become available. Both work exactly how they work in the <>. -`requests_per_second` can be set to any decimal number (1.4, 6, 1000, etc) and -throttle the number of requests per second that the update by query issues.
The -throttling is done waiting between bulk batches so that it can manipulate the -scroll timeout. The wait time is the difference between the time it took the +`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) +and throttles the number of requests per second that the update by query issues. +The throttling is done waiting between bulk batches so that it can manipulate +the scroll timeout. The wait time is the difference between the time it took the batch to complete and the time `requests_in_the_batch / requests_per_second`. Since the batch isn't broken into multiple bulk requests, large batch sizes will cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". +starting the next set. This is "bursty" instead of "smooth". The default is +`unlimited`, which is also the only non-number value that it accepts. [float] === Response body @@ -290,6 +291,46 @@ progress by adding the `updated`, `created`, and `deleted` fields. The request will finish when their sum is equal to the `total` field. +[float] +[[docs-update-by-query-cancel-task-api]] +=== Works with the Cancel Task API + +Any Update By Query can be canceled using the <>: + +[source,js] +-------------------------------------------------- +POST /_tasks/{task_id}/_cancel +-------------------------------------------------- +// AUTOSENSE + +The `task_id` can be found using the tasks API above. + +Cancellation should happen quickly but might take a few seconds. The task status +API above will continue to list the task until it wakes to cancel itself. + + +[float] +[[docs-update-by-query-rethrottle]] +=== Rethrottling + +The value of `requests_per_second` can be changed on a running update by query +using the `_rethrottle` API: + +[source,js] +-------------------------------------------------- +POST /_update_by_query/{task_id}/_rethrottle?requests_per_second=unlimited +-------------------------------------------------- +// AUTOSENSE + +The `task_id` can be found using the tasks API above. + +Just like when setting it on the `_update_by_query` API, `requests_per_second` +can be either `unlimited` to disable throttling or any decimal number like `1.7` +or `12` to throttle to that level. Rethrottling that speeds up the query takes +effect immediately, but rethrottling that slows down the query will take effect +after completing the current batch. This prevents scroll timeouts.
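To make the batch arithmetic above concrete (numbers are illustrative): with `requests_per_second` set to `500` and a scroll batch of `1000` documents, the target time per batch is `1000 / 500 = 2` seconds, so if the bulk request itself completed in half a second the request waits the remaining 1.5 seconds before starting the next batch. A sketch of starting a throttled update by query in the background (the index name is hypothetical):

[source,js]
--------------------------------------------------
POST /my_index/_update_by_query?requests_per_second=500&wait_for_completion=false
--------------------------------------------------
// AUTOSENSE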
+ + [float] [[picking-up-a-new-property]] === Pick up a new property diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index f403178037b..efa01d78790 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -339,7 +339,7 @@ public abstract class AbstractAsyncBulkByScrollAction, - Response extends ActionResponse, + Response extends BulkIndexByScrollResponse, Self extends AbstractBulkIndexByScrollRequestBuilder> extends AbstractBulkByScrollRequestBuilder { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 888bfeb30ad..39a254bf757 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -190,7 +190,7 @@ public class BulkByScrollTask extends CancellableTask { builder.field("noops", noops); builder.field("retries", retries); builder.timeValueField("throttled_millis", "throttled", throttled); - builder.field("requests_per_second", requestsPerSecond == 0 ? "unlimited" : requestsPerSecond); + builder.field("requests_per_second", requestsPerSecond == Float.POSITIVE_INFINITY ? "unlimited" : requestsPerSecond); if (reasonCancelled != null) { builder.field("canceled", reasonCancelled); } @@ -393,9 +393,6 @@ public class BulkByScrollTask extends CancellableTask { } private void setRequestsPerSecond(float requestsPerSecond) { - if (requestsPerSecond == -1) { - requestsPerSecond = 0; - } this.requestsPerSecond = requestsPerSecond; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequest.java index 387b074f8c4..93f36d7034b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequest.java @@ -21,6 +21,10 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -29,11 +33,11 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; */ public class RethrottleRequest extends BaseTasksRequest { /** - * The throttle to apply to all matching requests in sub-requests per second. 0 means set no throttle and that is the default. - * Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to make sure - * that it contains any time that we might wait. + * The throttle to apply to all matching requests in sub-requests per second. 0 means set no throttle. Throttling is done between + * batches, as we start the next scroll requests. That way we can increase the scroll's timeout to make sure that it contains any time + * that we might wait. 
*/ - private float requestsPerSecond = 0; + private Float requestsPerSecond; /** * The throttle to apply to all matching requests in sub-requests per second. 0 means set no throttle and that is the default. @@ -43,9 +47,15 @@ public class RethrottleRequest extends BaseTasksRequest { } /** - * Set the throttle to apply to all matching requests in sub-requests per second. 0 means set no throttle and that is the default. + * Set the throttle to apply to all matching requests in sub-requests per second. {@link Float#POSITIVE_INFINITY} means set no throttle. + * Throttling is done between batches, as we start the next scroll requests. That way we can increase the scroll's timeout to make sure + * that it contains any time that we might wait. */ public RethrottleRequest setRequestsPerSecond(float requestsPerSecond) { + if (requestsPerSecond <= 0) { + throw new IllegalArgumentException( + "[requests_per_second] must be greater than 0. Use Float.POSITIVE_INFINITY to disable throttling."); + } this.requestsPerSecond = requestsPerSecond; return this; } @@ -53,6 +63,9 @@ public class RethrottleRequest extends BaseTasksRequest { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); + if (requestsPerSecond == null) { + validationException = addValidationError("requests_per_second must be set", validationException); + } for (String action : getActions()) { switch (action) { case ReindexAction.NAME: @@ -65,4 +78,16 @@ public class RethrottleRequest extends BaseTasksRequest { } return validationException; } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + requestsPerSecond = in.readFloat(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeFloat(requestsPerSecond); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java index e04ee055718..d5cc3d9c5d3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java @@ -34,7 +34,9 @@ public class RethrottleRequestBuilder extends TasksRequestBuilder, - Response extends ActionResponse, + Response extends BulkIndexByScrollResponse, Builder extends AbstractBulkIndexByScrollRequestBuilder> Response testCancel(ESIntegTestCase test, Builder request, String actionToCancel) throws Exception { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java index c169f6819ea..1ff7d6d4492 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexBasicTests.java @@ -37,22 +37,22 @@ public class ReindexBasicTests extends ReindexTestCase { // Copy all the docs ReindexRequestBuilder copy = reindex().source("source").destination("dest", "all").refresh(true); - assertThat(copy.get(), responseMatcher().created(4)); + assertThat(copy.get(), reindexResponseMatcher().created(4)); assertHitCount(client().prepareSearch("dest").setTypes("all").setSize(0).get(), 4); // Now none of them copy = reindex().source("source").destination("all", "none").filter(termQuery("foo", 
"no_match")).refresh(true); - assertThat(copy.get(), responseMatcher().created(0)); + assertThat(copy.get(), reindexResponseMatcher().created(0)); assertHitCount(client().prepareSearch("dest").setTypes("none").setSize(0).get(), 0); // Now half of them copy = reindex().source("source").destination("dest", "half").filter(termQuery("foo", "a")).refresh(true); - assertThat(copy.get(), responseMatcher().created(2)); + assertThat(copy.get(), reindexResponseMatcher().created(2)); assertHitCount(client().prepareSearch("dest").setTypes("half").setSize(0).get(), 2); // Limit with size copy = reindex().source("source").destination("dest", "size_one").size(1).refresh(true); - assertThat(copy.get(), responseMatcher().created(1)); + assertThat(copy.get(), reindexResponseMatcher().created(1)); assertHitCount(client().prepareSearch("dest").setTypes("size_one").setSize(0).get(), 1); } @@ -70,7 +70,7 @@ public class ReindexBasicTests extends ReindexTestCase { ReindexRequestBuilder copy = reindex().source("source").destination("dest", "all").refresh(true); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); - assertThat(copy.get(), responseMatcher().created(max).batches(max, 5)); + assertThat(copy.get(), reindexResponseMatcher().created(max).batches(max, 5)); assertHitCount(client().prepareSearch("dest").setTypes("all").setSize(0).get(), max); // Copy some of the docs @@ -79,7 +79,7 @@ public class ReindexBasicTests extends ReindexTestCase { // Use a small batch size so we have to use more than one batch copy.source().setSize(5); copy.size(half); // The real "size" of the request. - assertThat(copy.get(), responseMatcher().created(half).batches(half, 5)); + assertThat(copy.get(), reindexResponseMatcher().created(half).batches(half, 5)); assertHitCount(client().prepareSearch("dest").setTypes("half").setSize(0).get(), half); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java index 590957237f8..44f5a3296cb 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java @@ -35,7 +35,7 @@ public class ReindexCancelTests extends ReindexTestCase { public void testCancel() throws Exception { ReindexResponse response = CancelTestUtils.testCancel(this, reindex().destination("dest", "test"), ReindexAction.NAME); - assertThat(response, responseMatcher().created(1).reasonCancelled(equalTo("by user request"))); + assertThat(response, reindexResponseMatcher().created(1).reasonCancelled(equalTo("by user request"))); refresh("dest"); assertHitCount(client().prepareSearch("dest").setSize(0).get(), 1); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java index 7aaec014d3e..196fa6c7e57 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java @@ -58,7 +58,7 @@ public class ReindexFailureTests extends ReindexTestCase { copy.source().setSize(1); ReindexResponse response = copy.get(); - assertThat(response, responseMatcher() + assertThat(response, reindexResponseMatcher() .batches(1) .failures(both(greaterThan(0)).and(lessThanOrEqualTo(maximumNumberOfShards())))); 
for (Failure failure: response.getIndexingFailures()) { @@ -78,7 +78,7 @@ public class ReindexFailureTests extends ReindexTestCase { copy.destination().setOpType(CREATE); ReindexResponse response = copy.get(); - assertThat(response, responseMatcher().batches(1).versionConflicts(1).failures(1).created(99)); + assertThat(response, reindexResponseMatcher().batches(1).versionConflicts(1).failures(1).created(99)); for (Failure failure: response.getIndexingFailures()) { assertThat(failure.getMessage(), containsString("VersionConflictEngineException[[test][")); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java index b971e125b94..c2f0b5625d8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java @@ -43,18 +43,18 @@ public class ReindexParentChildTests extends ReindexTestCase { // Copy parent to the new index ReindexRequestBuilder copy = reindex().source("source").destination("dest").filter(findsCountry).refresh(true); - assertThat(copy.get(), responseMatcher().created(1)); + assertThat(copy.get(), reindexResponseMatcher().created(1)); // Copy the child to a new index copy = reindex().source("source").destination("dest").filter(findsCity).refresh(true); - assertThat(copy.get(), responseMatcher().created(1)); + assertThat(copy.get(), reindexResponseMatcher().created(1)); // Make sure parent/child is intact on that index assertSearchHits(client().prepareSearch("dest").setQuery(findsCity).get(), "pittsburgh"); // Copy the grandchild to a new index copy = reindex().source("source").destination("dest").filter(findsNeighborhood).refresh(true); - assertThat(copy.get(), responseMatcher().created(1)); + assertThat(copy.get(), reindexResponseMatcher().created(1)); // Make sure parent/child is intact on that index assertSearchHits(client().prepareSearch("dest").setQuery(findsNeighborhood).get(), @@ -63,7 +63,7 @@ public class ReindexParentChildTests extends ReindexTestCase { // Copy the parent/child/grandchild structure all at once to a third index createParentChildIndex("dest_all_at_once"); copy = reindex().source("source").destination("dest_all_at_once").refresh(true); - assertThat(copy.get(), responseMatcher().created(3)); + assertThat(copy.get(), reindexResponseMatcher().created(3)); // Make sure parent/child/grandchild is intact there too assertSearchHits(client().prepareSearch("dest_all_at_once").setQuery(findsNeighborhood).get(), diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java index 8abdb39b6ae..d5426723b85 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java @@ -41,11 +41,23 @@ public abstract class ReindexTestCase extends ESIntegTestCase { return ReindexAction.INSTANCE.newRequestBuilder(client()); } - public IndexBySearchResponseMatcher responseMatcher() { + protected IndexBySearchResponseMatcher reindexResponseMatcher() { return new IndexBySearchResponseMatcher(); } - public static class IndexBySearchResponseMatcher + protected UpdateByQueryRequestBuilder updateByQuery() { + return UpdateByQueryAction.INSTANCE.newRequestBuilder(client()); + } + + 
protected BulkIndexbyScrollResponseMatcher updateByQueryResponseMatcher() { + return new BulkIndexbyScrollResponseMatcher(); + } + + protected RethrottleRequestBuilder rethrottle() { + return RethrottleAction.INSTANCE.newRequestBuilder(client()); + } + + protected static class IndexBySearchResponseMatcher extends AbstractBulkIndexByScrollResponseMatcher { private Matcher createdMatcher = equalTo(0L); @@ -74,4 +86,12 @@ public abstract class ReindexTestCase extends ESIntegTestCase { return this; } } + + protected static class BulkIndexbyScrollResponseMatcher + extends AbstractBulkIndexByScrollResponseMatcher { + @Override + protected BulkIndexbyScrollResponseMatcher self() { + return this; + } + } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexVersioningTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexVersioningTests.java index 725b55d76bb..0ce5b8740f7 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexVersioningTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexVersioningTests.java @@ -33,55 +33,55 @@ public class ReindexVersioningTests extends ReindexTestCase { public void testExternalVersioningCreatesWhenAbsentAndSetsVersion() throws Exception { setupSourceAbsent(); - assertThat(reindexExternal(), responseMatcher().created(1)); + assertThat(reindexExternal(), reindexResponseMatcher().created(1)); assertDest("source", SOURCE_VERSION); } public void testExternalVersioningUpdatesOnOlderAndSetsVersion() throws Exception { setupDestOlder(); - assertThat(reindexExternal(), responseMatcher().updated(1)); + assertThat(reindexExternal(), reindexResponseMatcher().updated(1)); assertDest("source", SOURCE_VERSION); } public void testExternalVersioningVersionConflictsOnNewer() throws Exception { setupDestNewer(); - assertThat(reindexExternal(), responseMatcher().versionConflicts(1)); + assertThat(reindexExternal(), reindexResponseMatcher().versionConflicts(1)); assertDest("dest", NEWER_VERSION); } public void testInternalVersioningCreatesWhenAbsent() throws Exception { setupSourceAbsent(); - assertThat(reindexInternal(), responseMatcher().created(1)); + assertThat(reindexInternal(), reindexResponseMatcher().created(1)); assertDest("source", 1); } public void testInternalVersioningUpdatesOnOlder() throws Exception { setupDestOlder(); - assertThat(reindexInternal(), responseMatcher().updated(1)); + assertThat(reindexInternal(), reindexResponseMatcher().updated(1)); assertDest("source", OLDER_VERSION + 1); } public void testInternalVersioningUpdatesOnNewer() throws Exception { setupDestNewer(); - assertThat(reindexInternal(), responseMatcher().updated(1)); + assertThat(reindexInternal(), reindexResponseMatcher().updated(1)); assertDest("source", NEWER_VERSION + 1); } public void testCreateCreatesWhenAbsent() throws Exception { setupSourceAbsent(); - assertThat(reindexCreate(), responseMatcher().created(1)); + assertThat(reindexCreate(), reindexResponseMatcher().created(1)); assertDest("source", 1); } public void testCreateVersionConflictsOnOlder() throws Exception { setupDestOlder(); - assertThat(reindexCreate(), responseMatcher().versionConflicts(1)); + assertThat(reindexCreate(), reindexResponseMatcher().versionConflicts(1)); assertDest("dest", OLDER_VERSION); } public void testCreateVersionConflictsOnNewer() throws Exception { setupDestNewer(); - assertThat(reindexCreate(), responseMatcher().versionConflicts(1)); + assertThat(reindexCreate(), 
reindexResponseMatcher().versionConflicts(1)); assertDest("dest", NEWER_VERSION); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java new file mode 100644 index 00000000000..ab4b76ef23f --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RethrottleTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; + +import static org.hamcrest.Matchers.hasSize; + +/** + * Tests that you can set requests_per_second via the Java API and that you can rethrottle running requests. There are REST tests for this + * too, but this is the only place that tests running against multiple nodes, so it is the only integration test that checks for + * serialization. + */ +public class RethrottleTests extends ReindexTestCase { + + public void testReindex() throws Exception { + testCase(reindex().source("test").destination("dest"), ReindexAction.NAME); + } + + public void testUpdateByQuery() throws Exception { + testCase(updateByQuery().source("test"), UpdateByQueryAction.NAME); + } + + private void testCase(AbstractBulkIndexByScrollRequestBuilder request, String actionName) + throws Exception { + // Use a single shard so the reindex has to happen in multiple batches + client().admin().indices().prepareCreate("test").setSettings("index.number_of_shards", 1).get(); + indexRandom(true, + client().prepareIndex("test", "test", "1").setSource("foo", "bar"), + client().prepareIndex("test", "test", "2").setSource("foo", "bar"), + client().prepareIndex("test", "test", "3").setSource("foo", "bar")); + + // Start a request that will never finish unless we rethrottle it + request.setRequestsPerSecond(.000001f); // Throttle forever + request.source().setSize(1); // Make sure we use multiple batches + ListenableActionFuture responseListener = request.execute(); + + // Now rethrottle it so it'll finish + ListTasksResponse rethrottleResponse = rethrottle().setActions(actionName).setRequestsPerSecond(Float.POSITIVE_INFINITY).get(); + assertThat(rethrottleResponse.getTasks(), hasSize(1)); + + // Now the response should come back quickly because we've rethrottled the request + BulkIndexByScrollResponse response = responseListener.get(); + assertEquals("Batches didn't match, this may invalidate the test as throttling is done between batches", 3, response.getBatches()); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 06bc0dba8ed..cbbe77e6479 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -120,9 +121,24 @@ public class RoundTripTests extends ESTestCase { assertResponseEquals(response, tripped); } + public void testRethrottleRequest() throws IOException { + RethrottleRequest request = new RethrottleRequest(); + request.setRequestsPerSecond((float) randomDoubleBetween(0, Float.POSITIVE_INFINITY, false)); + if (randomBoolean()) { + request.setActions(randomFrom(UpdateByQueryAction.NAME, ReindexAction.NAME)); + } else { + request.setTaskId(new TaskId(randomAsciiOfLength(5), randomLong())); + } + RethrottleRequest tripped = new RethrottleRequest(); + roundTrip(request, tripped); + assertEquals(request.getRequestsPerSecond(), tripped.getRequestsPerSecond(), 0.00001); + assertArrayEquals(request.getActions(), tripped.getActions()); + assertEquals(request.getTaskId(), tripped.getTaskId()); + } + private BulkByScrollTask.Status randomStatus() { return new BulkByScrollTask.Status(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), - randomPositiveInt(), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), + randomInt(Integer.MAX_VALUE), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), parseTimeValue(randomPositiveTimeValue(), "test"), abs(random().nextFloat()), random().nextBoolean() ? 
null : randomSimpleString(random()), parseTimeValue(randomPositiveTimeValue(), "test")); } @@ -163,10 +179,6 @@ public class RoundTripTests extends ESTestCase { return l; } - private int randomPositiveInt() { - return randomInt(Integer.MAX_VALUE); - } - private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) { assertEquals(expected.getTook(), actual.getTook()); assertTaskStatusEquals(expected.getStatus(), actual.getStatus()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java index 75a7ee2284b..3d70d71a9bd 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.search.sort.SortOrder; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -public class UpdateByQueryBasicTests extends UpdateByQueryTestCase { +public class UpdateByQueryBasicTests extends ReindexTestCase { public void testBasics() throws Exception { indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a"), client().prepareIndex("test", "test", "2").setSource("foo", "a"), @@ -35,26 +35,28 @@ public class UpdateByQueryBasicTests extends UpdateByQueryTestCase { assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion()); // Reindex all the docs - assertThat(request().source("test").refresh(true).get(), responseMatcher().updated(4)); + assertThat(updateByQuery().source("test").refresh(true).get(), updateByQueryResponseMatcher().updated(4)); assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); // Now none of them - assertThat(request().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), responseMatcher().updated(0)); + assertThat(updateByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), + updateByQueryResponseMatcher().updated(0)); assertEquals(2, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); // Now half of them - assertThat(request().source("test").filter(termQuery("foo", "a")).refresh(true).get(), responseMatcher().updated(2)); + assertThat(updateByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), + updateByQueryResponseMatcher().updated(2)); assertEquals(3, client().prepareGet("test", "test", "1").get().getVersion()); assertEquals(3, client().prepareGet("test", "test", "2").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "3").get().getVersion()); assertEquals(2, client().prepareGet("test", "test", "4").get().getVersion()); // Limit with size - UpdateByQueryRequestBuilder request = request().source("test").size(3).refresh(true); + UpdateByQueryRequestBuilder request = updateByQuery().source("test").size(3).refresh(true); request.source().addSort("foo.keyword", SortOrder.ASC); - assertThat(request.get(), responseMatcher().updated(3)); + assertThat(request.get(), updateByQueryResponseMatcher().updated(3)); // Only the first three documents are updated because of sort assertEquals(4, client().prepareGet("test", "test", 
"1").get().getVersion()); assertEquals(4, client().prepareGet("test", "test", "2").get().getVersion()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java index 069049765b8..dd63f90322e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java @@ -32,11 +32,11 @@ import static org.hamcrest.Matchers.equalTo; * places - that is the responsibility of {@link AsyncBulkByScrollActionTests} which have more precise control to simulate failures but do * not exercise important portion of the stack like transport and task management. */ -public class UpdateByQueryCancelTests extends UpdateByQueryTestCase { +public class UpdateByQueryCancelTests extends ReindexTestCase { public void testCancel() throws Exception { - BulkIndexByScrollResponse response = CancelTestUtils.testCancel(this, request(), UpdateByQueryAction.NAME); + BulkIndexByScrollResponse response = CancelTestUtils.testCancel(this, updateByQuery(), UpdateByQueryAction.NAME); - assertThat(response, responseMatcher().updated(1).reasonCancelled(equalTo("by user request"))); + assertThat(response, updateByQueryResponseMatcher().updated(1).reasonCancelled(equalTo("by user request"))); refresh("source"); assertHitCount(client().prepareSearch("source").setSize(0).setQuery(matchQuery("giraffes", "giraffes")).get(), 1); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryTestCase.java deleted file mode 100644 index 09613eaffeb..00000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryTestCase.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; - -import java.util.Collection; - -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; - -@ClusterScope(scope = SUITE, transportClientRatio = 0) -public abstract class UpdateByQueryTestCase extends ESIntegTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(ReindexPlugin.class); - } - - protected UpdateByQueryRequestBuilder request() { - return UpdateByQueryAction.INSTANCE.newRequestBuilder(client()); - } - - public BulkIndexbyScrollResponseMatcher responseMatcher() { - return new BulkIndexbyScrollResponseMatcher(); - } - - public static class BulkIndexbyScrollResponseMatcher extends - AbstractBulkIndexByScrollResponseMatcher { - @Override - protected BulkIndexbyScrollResponseMatcher self() { - return this; - } - } -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java index e2776d4d5d1..c36cbe974dc 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo; * Mutates a document while update-by-query-ing it and asserts that the mutation * always sticks. Update-by-query should never revert documents. */ -public class UpdateByQueryWhileModifyingTests extends UpdateByQueryTestCase { +public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { private static final int MAX_MUTATIONS = 50; private static final int MAX_ATTEMPTS = 10; @@ -48,8 +48,9 @@ public class UpdateByQueryWhileModifyingTests extends UpdateByQueryTestCase { Thread updater = new Thread(() -> { while (keepUpdating.get()) { try { - assertThat(request().source("test").refresh(true).abortOnVersionConflict(false).get(), responseMatcher() - .updated(either(equalTo(0L)).or(equalTo(1L))).versionConflicts(either(equalTo(0L)).or(equalTo(1L)))); + BulkIndexByScrollResponse response = updateByQuery().source("test").refresh(true).abortOnVersionConflict(false).get(); + assertThat(response, updateByQueryResponseMatcher().updated(either(equalTo(0L)).or(equalTo(1L))) + .versionConflicts(either(equalTo(0L)).or(equalTo(1L)))); } catch (Throwable t) { failure.set(t); } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index 542eb3bd932..dc54c0d91cf 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -152,7 +152,7 @@ --- "requests_per_second cannot be an empty string": - do: - catch: /requests_per_second cannot be an empty string/ + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ reindex: requests_per_second: "" body: @@ -161,6 +161,30 @@ dest: index: dest +--- +"requests_per_second cannot be negative": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. 
Use "unlimited" to disable throttling./ + reindex: + requests_per_second: -12 + body: + source: + index: test + dest: + index: dest + +--- +"requests_per_second cannot be zero": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + reindex: + requests_per_second: 0 + body: + source: + index: test + dest: + index: dest + --- "reindex without source gives useful error message": - do: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/80_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/80_throttle.yaml index 9b818268d4a..597cfa4240f 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/80_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/80_throttle.yaml @@ -52,6 +52,56 @@ - gte: { took: 1000 } - is_false: task +--- +"requests_per_second supports unlimited to turn off throttling": + - do: + indices.create: + index: source + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + index: + index: source + type: foo + id: 2 + body: { "text": "test" } + - do: + index: + index: source + type: foo + id: 3 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + reindex: + requests_per_second: unlimited + body: + source: + index: source + size: 1 + dest: + index: dest + - match: {created: 3} + - match: {updated: 0} + - match: {version_conflicts: 0} + - match: {batches: 3} + - match: {failures: []} + - match: {throttled_millis: 0} + - is_false: task + --- "Rethrottle": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard @@ -110,64 +160,6 @@ wait_for_completion: true task_id: $task ---- -"Rethrottle to -1 which also means unlimited": - # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard - # and a small batch size on the request - - do: - indices.create: - index: source - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - - do: - index: - index: source - type: foo - id: 1 - body: { "text": "test" } - - do: - index: - index: source - type: foo - id: 2 - body: { "text": "test" } - - do: - index: - index: source - type: foo - id: 3 - body: { "text": "test" } - - do: - indices.refresh: {} - - - do: - reindex: - requests_per_second: .00000001 # About 9.5 years to complete the request - wait_for_completion: false - body: - source: - index: source - size: 1 - dest: - index: dest - - match: {task: '/.+:\d+/'} - - set: {task: task} - - - do: - reindex.rethrottle: - requests_per_second: -1 - task_id: $task - - - do: - tasks.list: - wait_for_completion: true - task_id: $task - --- "Rethrottle but not unlimited": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml index f237a711f41..ea487eb54e0 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml @@ -40,20 +40,6 @@ index: test scroll_size: asdf ---- 
-"requests_per_second cannot be an empty string": - - do: - index: - index: test - type: test - id: 1 - body: { "text": "test" } - - do: - catch: /requests_per_second cannot be an empty string/ - update_by_query: - index: test - requests_per_second: '' - --- "update_by_query without source gives useful error message": - do: @@ -86,3 +72,27 @@ index: test body: fields: [_id] + +--- +"requests_per_second cannot be an empty string": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + update_by_query: + requests_per_second: "" + index: test + +--- +"requests_per_second cannot be negative": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + update_by_query: + requests_per_second: -12 + index: test + +--- +"requests_per_second cannot be zero": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + update_by_query: + requests_per_second: 0 + index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/70_throttle.yaml index 4d6377a5b64..3179ce32a19 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/70_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/70_throttle.yaml @@ -38,6 +38,44 @@ - gt: {throttled_millis: 1000} - lt: {throttled_millis: 4000} +--- +"requests_per_second supports unlimited which turns off throttling": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + update_by_query: + index: test + scroll_size: 1 + requests_per_second: unlimited + - match: {batches: 3} + - match: {updated: 3} + - match: {throttled_millis: 0} + --- "Rethrottle": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard @@ -88,56 +126,6 @@ wait_for_completion: true task_id: $task ---- -"Rethrottle to -1 which also means unlimited": - # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard - # and a small batch size on the request - - do: - indices.create: - index: test - body: - settings: - number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - - do: - index: - index: test - type: foo - body: { "text": "test" } - - do: - index: - index: test - type: foo - body: { "text": "test" } - - do: - index: - index: test - type: foo - body: { "text": "test" } - - do: - indices.refresh: {} - - - do: - update_by_query: - requests_per_second: .00000001 # About 9.5 years to complete the request - wait_for_completion: false - index: test - scroll_size: 1 - - match: {task: '/.+:\d+/'} - - set: {task: task} - - - do: - reindex.rethrottle: - requests_per_second: -1 - task_id: $task - - - do: - tasks.list: - wait_for_completion: true - task_id: $task - --- "Rethrottle but not unlimited": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json index 7b8705727a2..4903c7598c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json @@ -14,7 +14,7 @@ "params": { "requests_per_second": { "type": "float", - "default": 0, + "required": true, "description": "The throttle to set on this request in sub-requests per second. 0 means set no throttle. As does \"unlimited\". Otherwise it must be a float." } } From e49d21bdd6f9a50fd1356fe804c486ee2ee3debc Mon Sep 17 00:00:00 2001 From: debadair Date: Wed, 4 May 2016 13:38:57 -0700 Subject: [PATCH 0076/1311] Docs: Fixed broken cross doc links to script topics. --- docs/plugins/lang-javascript.asciidoc | 12 ++++++------ docs/plugins/lang-python.asciidoc | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/plugins/lang-javascript.asciidoc b/docs/plugins/lang-javascript.asciidoc index b3e811209b2..856e4bf4f03 100644 --- a/docs/plugins/lang-javascript.asciidoc +++ b/docs/plugins/lang-javascript.asciidoc @@ -48,7 +48,7 @@ Scripting is available in many APIs, but we will use an example with the WARNING: Enabling inline scripting on an unprotected Elasticsearch cluster is dangerous. See <> for a safer option. -If you have enabled {ref}/modules-scripting.html#enable-dynamic-scripting[inline scripts], +If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[inline scripts], you can use JavaScript as follows: [source,json] @@ -84,14 +84,14 @@ GET test/_search ---- // AUTOSENSE -[[lang-javascript-indexed]] +[[lang-javascript-stored]] [float] -=== Indexed scripts +=== Stored scripts -WARNING: Enabling indexed scripting on an unprotected Elasticsearch cluster is dangerous. +WARNING: Enabling stored scripts on an unprotected Elasticsearch cluster is dangerous. See <> for a safer option. -If you have enabled {ref}/modules-scripting.html#enable-dynamic-scripting[indexed scripts], +If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[stored scripts], you can use JavaScript as follows: [source,json] @@ -133,7 +133,7 @@ GET test/_search ---- // AUTOSENSE -<1> We index the script under the id `my_script`. +<1> We store the script under the id `my_script`. <2> The function score query retrieves the script with id `my_script`. diff --git a/docs/plugins/lang-python.asciidoc b/docs/plugins/lang-python.asciidoc index 9146af03381..49494828b13 100644 --- a/docs/plugins/lang-python.asciidoc +++ b/docs/plugins/lang-python.asciidoc @@ -47,7 +47,7 @@ Scripting is available in many APIs, but we will use an example with the WARNING: Enabling inline scripting on an unprotected Elasticsearch cluster is dangerous. See <> for a safer option. -If you have enabled {ref}/modules-scripting.html#enable-dynamic-scripting[inline scripts], +If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[inline scripts], you can use Python as follows: [source,json] @@ -83,14 +83,14 @@ GET test/_search ---- // AUTOSENSE -[[lang-python-indexed]] +[[lang-python-stored]] [float] -=== Indexed scripts +=== Stored scripts -WARNING: Enabling indexed scripting on an unprotected Elasticsearch cluster is dangerous. +WARNING: Enabling stored scripts on an unprotected Elasticsearch cluster is dangerous. See <> for a safer option. 
-If you have enabled {ref}/modules-scripting.html#enable-dynamic-scripting[indexed scripts], +If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[stored scripts], you can use Python as follows: [source,json] @@ -132,7 +132,7 @@ GET test/_search ---- // AUTOSENSE -<1> We index the script under the id `my_script`. +<1> We store the script under the id `my_script`. <2> The function score query retrieves the script with id `my_script`. From 07d8b3eb2b0146e3962ebe88d1b290745c5dbffa Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 4 May 2016 22:48:51 +0200 Subject: [PATCH 0077/1311] Fixed a bad asciidoc link --- docs/plugins/mapper-size.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc index 3213d502792..708c8bbd8f7 100644 --- a/docs/plugins/mapper-size.asciidoc +++ b/docs/plugins/mapper-size.asciidoc @@ -104,5 +104,5 @@ GET my_index/_search <1> Querying on the `_size` field <2> Aggregating on the `_size` field <3> Sorting on the `_size` field -<4> Accessing the `_size` field in scripts (inline scripts must be modules-scripting.html#enable-dynamic-scripting[enabled] for this example to work) +<4> Accessing the `_size` field in scripts (inline scripts must be modules-security-scripting.html#enable-dynamic-scripting[enabled] for this example to work) From 784c9e5fb9c055f5bf25a7ba214d6c8644fd463c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 4 May 2016 20:06:47 -0400 Subject: [PATCH 0078/1311] Introduce node handshake This commit introduces a handshake when initiating a light connection. During this handshake, node information, cluster name, and version are received from the target node of the connection. This information can be used to immediately validate that the target node is a member of the same cluster, and used to set the version on the stream. This will allow us to extend APIs that are used during initial cluster recovery without a major version change. 
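A minimal sketch of the cluster-name check this handshake enables (all names here are illustrative; this is not the actual Elasticsearch implementation):

[source,java]
--------------------------------------------------
// Conceptual sketch only: on connect, the initiator receives the remote
// node's identity and, when requested, rejects nodes from other clusters.
final class HandshakeSketch {

    static final class HandshakeResponse {
        final String nodeName;
        final String clusterName;
        final String version;

        HandshakeResponse(String nodeName, String clusterName, String version) {
            this.nodeName = nodeName;
            this.clusterName = clusterName;
            this.version = version;
        }
    }

    static HandshakeResponse handshake(String localClusterName, HandshakeResponse remote,
                                       boolean checkClusterName) {
        // Fail fast when the target node belongs to a different cluster.
        if (checkClusterName && !localClusterName.equals(remote.clusterName)) {
            throw new IllegalStateException(
                "handshake failed, mismatched cluster name [" + remote.clusterName + "]");
        }
        // The remote version can now be used to set the version on the stream.
        return remote;
    }
}
--------------------------------------------------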
Relates #15971 --- .../TransportClientNodesService.java | 5 +- .../zen/ping/unicast/UnicastZenPing.java | 8 +- .../transport/TransportService.java | 133 ++++++++++- .../node/tasks/TaskManagerTestCase.java | 5 +- .../bulk/TransportBulkActionTookTests.java | 2 +- .../TransportBroadcastByNodeActionTests.java | 2 +- .../TransportMasterNodeActionTests.java | 2 +- .../nodes/TransportNodesActionTests.java | 2 +- .../BroadcastReplicationTests.java | 2 +- .../TransportReplicationActionTests.java | 2 +- ...ortInstanceSingleOperationActionTests.java | 2 +- .../transport/FailAndRetryMockTransport.java | 13 +- .../TransportClientHeadersTests.java | 4 +- .../TransportClientNodesServiceTests.java | 18 +- .../cluster/NodeConnectionsServiceTests.java | 2 +- .../action/shard/ShardStateActionTests.java | 2 +- .../health/ClusterStateHealthTests.java | 2 +- .../common/network/NetworkModuleTests.java | 2 +- .../discovery/ZenFaultDetectionTests.java | 8 +- .../zen/ping/unicast/UnicastZenPingIT.java | 138 ++++++++++-- .../PublishClusterStateActionTests.java | 21 +- .../mapper/DynamicMappingDisabledTests.java | 6 +- .../indices/store/IndicesStoreTests.java | 2 +- .../AbstractSimpleTransportTestCase.java | 19 +- .../NettySizeHeaderFrameDecoderTests.java | 4 +- .../NettyTransportServiceHandshakeTests.java | 206 ++++++++++++++++++ .../transport/TransportModuleTests.java | 5 +- .../local/SimpleLocalTransportTests.java | 5 +- .../netty/NettyScheduledPingTests.java | 7 +- .../netty/SimpleNettyTransportTests.java | 5 +- .../messy/tests/IndicesRequestTests.java | 5 +- .../discovery/ec2/Ec2DiscoveryTests.java | 3 +- .../discovery/gce/GceDiscoveryTests.java | 3 +- .../test/transport/MockTransportService.java | 19 +- 34 files changed, 558 insertions(+), 106 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index e407a2e7ada..68ed7c927ac 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -360,9 +360,10 @@ public class TransportClientNodesService extends AbstractComponent { try { // its a listed node, light connect to it... logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNodeLight(listedNode); + transportService.connectToNodeLight(listedNode, pingTimeout, !ignoreClusterName); } catch (Throwable e) { logger.debug("failed to connect to node [{}], removed from nodes list", e, listedNode); + newFilteredNodes.add(listedNode); continue; } } @@ -434,7 +435,7 @@ public class TransportClientNodesService extends AbstractComponent { } else { // its a listed node, light connect to it... 
logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNodeLight(listedNode); + transportService.connectToNodeLight(listedNode, pingTimeout, !ignoreClusterName); } } catch (Exception e) { logger.debug("failed to connect to node [{}], ignoring...", e, listedNode); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 640582af226..cc37504360c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -402,7 +402,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen // connect to the node, see if we manage to do it, if not, bail if (!nodeFoundByAddress) { logger.trace("[{}] connecting (light) to {}", sendPingsHandler.id(), finalNodeToSend); - transportService.connectToNodeLight(finalNodeToSend); + transportService.connectToNodeLight(finalNodeToSend, timeout.getMillis()); } else { logger.trace("[{}] connecting to {}", sendPingsHandler.id(), finalNodeToSend); transportService.connectToNode(finalNodeToSend); @@ -473,12 +473,6 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen // that's us, ignore continue; } - if (!pingResponse.clusterName().equals(clusterName)) { - // not part of the cluster - logger.debug("[{}] filtering out response from {}, not same cluster_name [{}]", id, pingResponse.node(), - pingResponse.clusterName().value()); - continue; - } SendPingsHandler sendPingsHandler = receivedResponses.get(response.id); if (sendPingsHandler == null) { if (!closed) { diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index b9663da72c2..89cc68debfd 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -19,12 +19,16 @@ package org.elasticsearch.transport; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.metrics.MeanMetric; @@ -50,6 +54,7 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledFuture; @@ -67,10 +72,12 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; public class TransportService extends AbstractLifecycleComponent { public static final String DIRECT_RESPONSE_PROFILE = ".direct"; + private static final String HANDSHAKE_ACTION_NAME = "internal:transport/handshake"; private final CountDownLatch blockIncomingRequestsLatch = new CountDownLatch(1); protected final Transport 
transport; protected final ThreadPool threadPool; + private final ClusterName clusterName; protected final TaskManager taskManager; volatile Map requestHandlers = Collections.emptyMap(); @@ -110,15 +117,16 @@ public class TransportService extends AbstractLifecycleComponent HandshakeRequest.INSTANCE, + ThreadPool.Names.SAME, + (request, channel) -> channel.sendResponse( + new HandshakeResponse(localNode, clusterName, localNode != null ? localNode.getVersion() : Version.CURRENT))); } @Override @@ -263,11 +277,120 @@ public class TransportService extends AbstractLifecycleComponent() { + @Override + public HandshakeResponse newInstance() { + return new HandshakeResponse(); + } + }).txGet(); + } catch (Exception e) { + throw new ConnectTransportException(node, "handshake failed", e); + } + + if (checkClusterName && !Objects.equals(clusterName, response.clusterName)) { + throw new ConnectTransportException(node, "handshake failed, mismatched cluster name [" + response.clusterName + "]"); + } else if (!isVersionCompatible(response.version)) { + throw new ConnectTransportException(node, "handshake failed, incompatible version [" + response.version + "]"); + } + + return response.discoveryNode; + } + + private boolean isVersionCompatible(Version version) { + return version.minimumCompatibilityVersion().equals( + localNode != null ? localNode.getVersion().minimumCompatibilityVersion() : Version.CURRENT.minimumCompatibilityVersion()); + } + + public static class HandshakeRequest extends TransportRequest { + + public static final HandshakeRequest INSTANCE = new HandshakeRequest(); + + private HandshakeRequest() { + } + + } + + public static class HandshakeResponse extends TransportResponse { + private DiscoveryNode discoveryNode; + private ClusterName clusterName; + private Version version; + + public HandshakeResponse() { + } + + public HandshakeResponse(DiscoveryNode discoveryNode, ClusterName clusterName, Version version) { + this.discoveryNode = discoveryNode; + this.version = version; + this.clusterName = clusterName; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + discoveryNode = in.readOptionalWriteable(DiscoveryNode::new); + clusterName = ClusterName.readClusterName(in); + version = Version.readVersion(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(discoveryNode); + clusterName.writeTo(out); + Version.writeVersion(version, out); + } } public void disconnectFromNode(DiscoveryNode node) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 0490dfd96aa..9c0e2bfcafd 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -184,9 +184,11 @@ public abstract class TaskManagerTestCase extends ESTestCase { public static class TestNode implements Releasable { public TestNode(String name, ThreadPool threadPool, Settings settings) { + clusterService = createClusterService(threadPool); + ClusterName clusterName = clusterService.state().getClusterName(); transportService = new TransportService(settings, new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry(), - new NoneCircuitBreakerService()), threadPool) { + new 
NoneCircuitBreakerService()), threadPool, clusterName) { @Override protected TaskManager createTaskManager() { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { @@ -197,7 +199,6 @@ public abstract class TaskManagerTestCase extends ESTestCase { } }; transportService.start(); - clusterService = createClusterService(threadPool); clusterService.add(transportService.getTaskManager()); discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), emptyMap(), emptySet(), Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 39202fcc43a..db1d09b3aa7 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -86,7 +86,7 @@ public class TransportBulkActionTookTests extends ESTestCase { private TransportBulkAction createAction(boolean controlled, AtomicLong expected) { CapturingTransport capturingTransport = new CapturingTransport(); - TransportService transportService = new TransportService(capturingTransport, threadPool); + TransportService transportService = new TransportService(capturingTransport, threadPool, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); IndexNameExpressionResolver resolver = new Resolver(Settings.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index f76fdf4fd20..23b0df27480 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -190,7 +190,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(THREAD_POOL); - final TransportService transportService = new TransportService(transport, THREAD_POOL); + final TransportService transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); setClusterState(clusterService, TEST_INDEX); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index a35accc5fc5..ccdb13f710a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -85,7 +85,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(threadPool); - transportService = new TransportService(transport, threadPool); + transportService = new TransportService(transport, threadPool, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), diff --git 
a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index ad2326c3148..6a7f7ac3398 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -119,7 +119,7 @@ public class TransportNodesActionTests extends ESTestCase { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(THREAD_POOL); - final TransportService transportService = new TransportService(transport, THREAD_POOL); + final TransportService transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); int numNodes = randomIntBetween(3, 10); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 0bd7f9bf18a..5253097818e 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -91,7 +91,7 @@ public class BroadcastReplicationTests extends ESTestCase { super.setUp(); LocalTransport transport = new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry(), circuitBreakerService); clusterService = createClusterService(threadPool); - transportService = new TransportService(transport, threadPool); + transportService = new TransportService(transport, threadPool, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), null); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index a10ce35ca41..e9a92ff25b9 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -120,7 +120,7 @@ public class TransportReplicationActionTests extends ESTestCase { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(threadPool); - transportService = new TransportService(transport, threadPool); + transportService = new TransportService(transport, threadPool, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool); diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 2dd31548cb9..86165461c84 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -141,7 +141,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { super.setUp(); transport = new CapturingTransport(); clusterService = createClusterService(THREAD_POOL); - transportService = new TransportService(transport, THREAD_POOL); + transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); action = new TestTransportInstanceSingleOperationAction( diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index 22b04a19a26..0aaec63fef0 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -20,7 +20,9 @@ package org.elasticsearch.client.transport; import com.carrotsearch.randomizedtesting.generators.RandomInts; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; +import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.Lifecycle; @@ -34,6 +36,7 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportServiceAdapter; import java.io.IOException; @@ -47,6 +50,7 @@ import java.util.concurrent.atomic.AtomicInteger; abstract class FailAndRetryMockTransport implements Transport { private final Random random; + private final ClusterName clusterName; private boolean connectMode = true; @@ -57,8 +61,9 @@ abstract class FailAndRetryMockTransport imp private final AtomicInteger successes = new AtomicInteger(); private final Set triedNodes = new CopyOnWriteArraySet<>(); - FailAndRetryMockTransport(Random random) { + FailAndRetryMockTransport(Random random, ClusterName clusterName) { this.random = new Random(random.nextLong()); + this.clusterName = clusterName; } @Override @@ -69,7 +74,11 @@ abstract class FailAndRetryMockTransport imp //we make sure that nodes get added to the connected ones when calling addTransportAddress, by returning proper nodes info if (connectMode) { TransportResponseHandler transportResponseHandler = transportServiceAdapter.onResponseReceived(requestId); - transportResponseHandler.handleResponse(new LivenessResponse(ClusterName.DEFAULT, node)); + if (action.equals(TransportLivenessAction.NAME)) { + transportResponseHandler.handleResponse(new LivenessResponse(clusterName, node)); + } else { + transportResponseHandler.handleResponse(new TransportService.HandshakeResponse(node, clusterName, Version.CURRENT)); + } return; } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index a7e472da3b4..ec0a9e5cc08 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ 
b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -123,8 +123,8 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { CountDownLatch clusterStateLatch = new CountDownLatch(1); @Inject - public InternalTransportService(Settings settings, Transport transport, ThreadPool threadPool) { - super(settings, transport, threadPool); + public InternalTransportService(Settings settings, Transport transport, ThreadPool threadPool, ClusterName clusterName) { + super(settings, transport, threadPool, clusterName); } @Override @SuppressWarnings("unchecked") diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 0246ec227dd..b566d764231 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -62,8 +62,9 @@ public class TransportClientNodesServiceTests extends ESTestCase { private final int nodesCount; TestIteration() { + ClusterName clusterName = new ClusterName("test"); threadPool = new ThreadPool("transport-client-nodes-service-tests"); - transport = new FailAndRetryMockTransport(random()) { + transport = new FailAndRetryMockTransport(random(), clusterName) { @Override public List getLocalAddresses() { return Collections.emptyList(); @@ -74,12 +75,12 @@ public class TransportClientNodesServiceTests extends ESTestCase { return new TestResponse(); } }; - transportService = new TransportService(Settings.EMPTY, transport, threadPool) { + transportService = new TransportService(Settings.EMPTY, transport, threadPool, clusterName) { @Override public void sendRequest(DiscoveryNode node, String action, TransportRequest request, final TransportResponseHandler handler) { if (TransportLivenessAction.NAME.equals(action)) { - super.sendRequest(node, action, request, wrapLivenessResponseHandler(handler, node)); + super.sendRequest(node, action, request, wrapLivenessResponseHandler(handler, node, clusterName)); } else { super.sendRequest(node, action, request, handler); } @@ -90,7 +91,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { TransportRequestOptions options, TransportResponseHandler handler) { if (TransportLivenessAction.NAME.equals(action)) { - super.sendRequest(node, action, request, options, wrapLivenessResponseHandler(handler, node)); + super.sendRequest(node, action, request, options, wrapLivenessResponseHandler(handler, node, clusterName)); } else { super.sendRequest(node, action, request, options, handler); } @@ -98,8 +99,8 @@ public class TransportClientNodesServiceTests extends ESTestCase { }; transportService.start(); transportService.acceptIncomingRequests(); - transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, - Version.CURRENT); + transportClientNodesService = + new TransportClientNodesService(Settings.EMPTY, clusterName, transportService, threadPool, Version.CURRENT); this.nodesCount = randomIntBetween(1, 10); for (int i = 0; i < nodesCount; i++) { transportClientNodesService.addTransportAddresses(new LocalTransportAddress("node" + i)); @@ -108,7 +109,8 @@ public class TransportClientNodesServiceTests extends ESTestCase { } private TransportResponseHandler wrapLivenessResponseHandler(TransportResponseHandler handler, - DiscoveryNode 
node) { + DiscoveryNode node, + ClusterName clusterName) { return new TransportResponseHandler() { @Override public T newInstance() { @@ -118,7 +120,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { @Override @SuppressWarnings("unchecked") public void handleResponse(T response) { - LivenessResponse livenessResponse = new LivenessResponse(ClusterName.DEFAULT, + LivenessResponse livenessResponse = new LivenessResponse(clusterName, new DiscoveryNode(node.getName(), node.getId(), "liveness-hostname" + node.getId(), "liveness-hostaddress" + node.getId(), new LocalTransportAddress("liveness-address-" + node.getId()), node.getAttributes(), node.getRoles(), diff --git a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index c263bcbcf37..a3bb73977e7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -149,7 +149,7 @@ public class NodeConnectionsServiceTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); this.transport = new MockTransport(); - transportService = new TransportService(transport, THREAD_POOL); + transportService = new TransportService(transport, THREAD_POOL, ClusterName.DEFAULT); transportService.start(); transportService.acceptIncomingRequests(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index c79d198d350..6085bf92c32 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -106,7 +106,7 @@ public class ShardStateActionTests extends ESTestCase { super.setUp(); this.transport = new CapturingTransport(); clusterService = createClusterService(THREAD_POOL); - transportService = new TransportService(transport, THREAD_POOL); + transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index bf465d8d44e..298175c41ee 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -80,7 +80,7 @@ public class ClusterStateHealthTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); clusterService = createClusterService(threadPool); - transportService = new TransportService(new CapturingTransport(), threadPool); + transportService = new TransportService(new CapturingTransport(), threadPool, clusterService.state().getClusterName()); transportService.start(); transportService.acceptIncomingRequests(); } diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index cbc517ceb64..d5fc4630bcf 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ 
b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -52,7 +52,7 @@ public class NetworkModuleTests extends ModuleTestCase { static class FakeTransportService extends TransportService { public FakeTransportService() { - super(null, null); + super(null, null, null); } } diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index ab834ca6ae2..e5199048c35 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -115,8 +115,12 @@ public class ZenFaultDetectionTests extends ESTestCase { protected MockTransportService build(Settings settings, Version version) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - MockTransportService transportService = new MockTransportService(Settings.EMPTY, - new LocalTransport(settings, threadPool, version, namedWriteableRegistry, circuitBreakerService), threadPool); + MockTransportService transportService = + new MockTransportService( + Settings.EMPTY, + new LocalTransport(settings, threadPool, version, namedWriteableRegistry, circuitBreakerService), + threadPool, + ClusterName.DEFAULT); transportService.start(); transportService.acceptIncomingRequests(); return transportService; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index 00e3daf1fc8..7847d7027d0 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.discovery.zen.elect.ElectMasterService; @@ -35,16 +36,22 @@ import org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.netty.NettyTransport; +import org.jboss.netty.util.internal.ConcurrentHashMap; import java.net.InetSocketAddress; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; public class UnicastZenPingIT extends ESTestCase { public void testSimplePings() throws InterruptedException { @@ -54,36 +61,31 @@ public class UnicastZenPingIT extends ESTestCase { settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), startPort + "-" + endPort).build(); ThreadPool 
threadPool = new ThreadPool(getClass().getName()); - ClusterName clusterName = new ClusterName("test"); + ClusterName test = new ClusterName("test"); + ClusterName mismatch = new ClusterName("mismatch"); NetworkService networkService = new NetworkService(settings); ElectMasterService electMasterService = new ElectMasterService(settings, Version.CURRENT); - NettyTransport transportA = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); - final TransportService transportServiceA = new TransportService(transportA, threadPool).start(); - transportServiceA.acceptIncomingRequests(); - final DiscoveryNode nodeA = new DiscoveryNode("UZP_A", transportServiceA.boundAddress().publishAddress(), - emptyMap(), emptySet(), Version.CURRENT); - - InetSocketTransportAddress addressA = (InetSocketTransportAddress) transportA.boundAddress().publishAddress(); - - NettyTransport transportB = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); - final TransportService transportServiceB = new TransportService(transportB, threadPool).start(); - transportServiceB.acceptIncomingRequests(); - final DiscoveryNode nodeB = new DiscoveryNode("UZP_B", transportServiceA.boundAddress().publishAddress(), - emptyMap(), emptySet(), Version.CURRENT); - - InetSocketTransportAddress addressB = (InetSocketTransportAddress) transportB.boundAddress().publishAddress(); + NetworkHandle handleA = startServices(settings, threadPool, networkService, "UZP_A", test, Version.CURRENT); + NetworkHandle handleB = startServices(settings, threadPool, networkService, "UZP_B", test, Version.CURRENT); + NetworkHandle handleC = startServices(settings, threadPool, networkService, "UZP_C", new ClusterName("mismatch"), Version.CURRENT); + // just fake that no versions are compatible with this node + Version previousVersion = VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()); + Version versionD = VersionUtils.randomVersionBetween(random(), previousVersion.minimumCompatibilityVersion(), previousVersion); + NetworkHandle handleD = startServices(settings, threadPool, networkService, "UZP_D", test, versionD); Settings hostsSettings = Settings.builder().putArray("discovery.zen.ping.unicast.hosts", - NetworkAddress.format(new InetSocketAddress(addressA.address().getAddress(), addressA.address().getPort())), - NetworkAddress.format(new InetSocketAddress(addressB.address().getAddress(), addressB.address().getPort()))) + NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), + NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), + NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())), + NetworkAddress.format(new InetSocketAddress(handleD.address.address().getAddress(), handleD.address.address().getPort()))) .build(); - UnicastZenPing zenPingA = new UnicastZenPing(hostsSettings, threadPool, transportServiceA, clusterName, Version.CURRENT, electMasterService, null); + UnicastZenPing zenPingA = new UnicastZenPing(hostsSettings, threadPool, handleA.transportService, test, Version.CURRENT, electMasterService, null); zenPingA.setPingContextProvider(new PingContextProvider() { @Override public 
DiscoveryNodes nodes() { - return DiscoveryNodes.builder().put(nodeA).localNodeId("UZP_A").build(); + return DiscoveryNodes.builder().put(handleA.node).localNodeId("UZP_A").build(); } @Override @@ -93,11 +95,11 @@ public class UnicastZenPingIT extends ESTestCase { }); zenPingA.start(); - UnicastZenPing zenPingB = new UnicastZenPing(hostsSettings, threadPool, transportServiceB, clusterName, Version.CURRENT, electMasterService, null); + UnicastZenPing zenPingB = new UnicastZenPing(hostsSettings, threadPool, handleB.transportService, test, Version.CURRENT, electMasterService, null); zenPingB.setPingContextProvider(new PingContextProvider() { @Override public DiscoveryNodes nodes() { - return DiscoveryNodes.builder().put(nodeB).localNodeId("UZP_B").build(); + return DiscoveryNodes.builder().put(handleB.node).localNodeId("UZP_B").build(); } @Override @@ -107,12 +109,41 @@ public class UnicastZenPingIT extends ESTestCase { }); zenPingB.start(); + UnicastZenPing zenPingC = new UnicastZenPing(hostsSettings, threadPool, handleC.transportService, mismatch, versionD, electMasterService, null); + zenPingC.setPingContextProvider(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().put(handleC.node).localNodeId("UZP_C").build(); + } + + @Override + public boolean nodeHasJoinedClusterOnce() { + return false; + } + }); + zenPingC.start(); + + UnicastZenPing zenPingD = new UnicastZenPing(hostsSettings, threadPool, handleD.transportService, mismatch, Version.CURRENT, electMasterService, null); + zenPingD.setPingContextProvider(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().put(handleD.node).localNodeId("UZP_D").build(); + } + + @Override + public boolean nodeHasJoinedClusterOnce() { + return false; + } + }); + zenPingD.start(); + try { logger.info("ping from UZP_A"); ZenPing.PingResponse[] pingResponses = zenPingA.pingAndWait(TimeValue.timeValueSeconds(10)); assertThat(pingResponses.length, equalTo(1)); assertThat(pingResponses[0].node().getId(), equalTo("UZP_B")); assertTrue(pingResponses[0].hasJoinedOnce()); + assertCounters(handleA, handleA, handleB, handleC, handleD); // ping again, this time from B, logger.info("ping from UZP_B"); @@ -120,13 +151,72 @@ public class UnicastZenPingIT extends ESTestCase { assertThat(pingResponses.length, equalTo(1)); assertThat(pingResponses[0].node().getId(), equalTo("UZP_A")); assertFalse(pingResponses[0].hasJoinedOnce()); + assertCounters(handleB, handleA, handleB, handleC, handleD); + logger.info("ping from UZP_C"); + pingResponses = zenPingC.pingAndWait(TimeValue.timeValueSeconds(10)); + assertThat(pingResponses.length, equalTo(0)); + assertCounters(handleC, handleA, handleB, handleC, handleD); + + logger.info("ping from UZP_D"); + pingResponses = zenPingD.pingAndWait(TimeValue.timeValueSeconds(10)); + assertThat(pingResponses.length, equalTo(0)); + assertCounters(handleD, handleA, handleB, handleC, handleD); } finally { zenPingA.close(); zenPingB.close(); - transportServiceA.close(); - transportServiceB.close(); + zenPingC.close(); + zenPingD.close(); + handleA.transportService.close(); + handleB.transportService.close(); + handleC.transportService.close(); + handleD.transportService.close(); terminate(threadPool); } } + + // assert that we tried to ping each of the configured nodes at least once + private void assertCounters(NetworkHandle that, NetworkHandle...handles) { + for (NetworkHandle handle : handles) { + if (handle != that) { + 
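+                // a handle never connects to itself, so only connections to the other nodes are counted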
assertThat(that.counters.get(handle.address).get(), greaterThan(0)); + } + } + } + + private NetworkHandle startServices(Settings settings, ThreadPool threadPool, NetworkService networkService, String nodeId, ClusterName clusterName, Version version) { + NettyTransport transport = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, version, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); + final TransportService transportService = new TransportService(transport, threadPool, clusterName); + transportService.start(); + transportService.acceptIncomingRequests(); + ConcurrentMap counters = new ConcurrentHashMap<>(); + transportService.addConnectionListener(new TransportConnectionListener() { + @Override + public void onNodeConnected(DiscoveryNode node) { + counters.computeIfAbsent(node.getAddress(), k -> new AtomicInteger()); + counters.get(node.getAddress()).incrementAndGet(); + } + + @Override + public void onNodeDisconnected(DiscoveryNode node) { + } + }); + final DiscoveryNode node = new DiscoveryNode(nodeId, transportService.boundAddress().publishAddress(), emptyMap(), emptySet(), version); + transportService.setLocalNode(node); + return new NetworkHandle((InetSocketTransportAddress)transport.boundAddress().publishAddress(), transportService, node, counters); + } + + private static class NetworkHandle { + public final InetSocketTransportAddress address; + public final TransportService transportService; + public final DiscoveryNode node; + public final ConcurrentMap counters; + + public NetworkHandle(InetSocketTransportAddress address, TransportService transportService, DiscoveryNode discoveryNode, ConcurrentMap counters) { + this.address = address; + this.transportService = transportService; + this.node = discoveryNode; + this.counters = counters; + } + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 335d1b3e8fd..e6b160eabf8 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -83,6 +83,9 @@ import static org.hamcrest.Matchers.nullValue; @TestLogging("discovery.zen.publish:TRACE") public class PublishClusterStateActionTests extends ESTestCase { + + private static final ClusterName CLUSTER_NAME = ClusterName.DEFAULT; + protected ThreadPool threadPool; protected Map nodes = new HashMap<>(); @@ -101,7 +104,7 @@ public class PublishClusterStateActionTests extends ESTestCase { this.service = service; this.listener = listener; this.logger = logger; - this.clusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(DiscoveryNodes.builder().put(discoveryNode).localNodeId(discoveryNode.getId()).build()).build(); + this.clusterState = ClusterState.builder(CLUSTER_NAME).nodes(DiscoveryNodes.builder().put(discoveryNode).localNodeId(discoveryNode.getId()).build()).build(); } public MockNode setAsMaster() { @@ -229,7 +232,7 @@ public class PublishClusterStateActionTests extends ESTestCase { } protected MockTransportService buildTransportService(Settings settings, Version version) { - MockTransportService transportService = MockTransportService.local(Settings.EMPTY, version, threadPool); + MockTransportService transportService = MockTransportService.local(Settings.EMPTY, version, threadPool, CLUSTER_NAME); 
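+        // all mock nodes share CLUSTER_NAME so the connection handshake succeeds between them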
transportService.start(); transportService.acceptIncomingRequests(); return transportService; @@ -249,7 +252,7 @@ public class PublishClusterStateActionTests extends ESTestCase { clusterStateSupplier, listener, discoverySettings, - ClusterName.DEFAULT); + CLUSTER_NAME); } public void testSimpleClusterStatePublishing() throws Exception { @@ -343,7 +346,7 @@ public class PublishClusterStateActionTests extends ESTestCase { // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build(); - ClusterState previousClusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(discoveryNodes).build(); + ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); @@ -374,7 +377,7 @@ public class PublishClusterStateActionTests extends ESTestCase { // Initial cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.getId()).masterNodeId(nodeA.discoveryNode.getId()).build(); - ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(discoveryNodes).build(); + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); // cluster state update - add nodeB discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeB.discoveryNode).build(); @@ -417,7 +420,7 @@ public class PublishClusterStateActionTests extends ESTestCase { AssertingAckListener[] listeners = new AssertingAckListener[numberOfIterations]; DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; - ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).build(); + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).build(); ClusterState previousState; for (int i = 0; i < numberOfIterations; i++) { previousState = clusterState; @@ -451,7 +454,7 @@ public class PublishClusterStateActionTests extends ESTestCase { // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build(); - ClusterState previousClusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(discoveryNodes).build(); + ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build(); ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build(); publishStateAndWait(nodeA.action, clusterState, previousClusterState); assertSameStateFromFull(nodeB.clusterState, clusterState); @@ -503,7 +506,7 @@ public class PublishClusterStateActionTests extends ESTestCase { discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId()); DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; - ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).nodes(discoveryNodes).build(); + ClusterState clusterState = 
ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build(); ClusterState previousState = master.clusterState; try { publishState(master.action, clusterState, previousState, masterNodes + randomIntBetween(1, 5)); @@ -580,7 +583,7 @@ public class PublishClusterStateActionTests extends ESTestCase { discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId()); DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build(); MetaData metaData = MetaData.EMPTY_META_DATA; - ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).nodes(discoveryNodes).build(); + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build(); ClusterState previousState = master.clusterState; try { publishState(master.action, clusterState, previousState, minMasterNodes); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index a7ceec92a61..7f9c4a3bbfe 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -74,9 +74,9 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false) .build(); clusterService = createClusterService(THREAD_POOL); - transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry(), - new NoneCircuitBreakerService()); - transportService = new TransportService(transport, THREAD_POOL); + transport = + new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); + transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName()); indicesService = getInstanceFromNode(IndicesService.class); shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null, THREAD_POOL); actionFilters = new ActionFilters(Collections.emptySet()); diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index 721c66c04e1..cf28de64b87 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -87,7 +87,7 @@ public class IndicesStoreTests extends ESTestCase { public void before() { localNode = new DiscoveryNode("abc", new LocalTransportAddress("abc"), emptyMap(), emptySet(), Version.CURRENT); clusterService = createClusterService(threadPool); - indicesStore = new IndicesStore(Settings.EMPTY, null, clusterService, new TransportService(null, null), null); + indicesStore = new IndicesStore(Settings.EMPTY, null, clusterService, new TransportService(null, null, clusterService.state().getClusterName()), null); } @After diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index edeecd61d8e..45966606d31 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.transport; 
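// the ClusterName import supports the new build(Settings, Version, ClusterName) signature used below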
import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -65,7 +66,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { protected DiscoveryNode nodeB; protected MockTransportService serviceB; - protected abstract MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry); + protected abstract MockTransportService build(Settings settings, Version version, ClusterName clusterName); @Override @Before @@ -78,8 +79,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), - version0, new NamedWriteableRegistry() - ); + version0, + ClusterName.DEFAULT); serviceA.acceptIncomingRequests(); nodeA = new DiscoveryNode("TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), emptySet(), version0); serviceB = build( @@ -88,8 +89,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), - version1, new NamedWriteableRegistry() - ); + version1, + ClusterName.DEFAULT); serviceB.acceptIncomingRequests(); nodeB = new DiscoveryNode("TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); @@ -1178,7 +1179,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try { - serviceB.connectToNodeLight(nodeA); + serviceB.connectToNodeLight(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well @@ -1238,7 +1239,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try { - serviceB.connectToNodeLight(nodeA); + serviceB.connectToNodeLight(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well @@ -1298,8 +1299,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") .build(), - version0, new NamedWriteableRegistry() - ); + version0, + ClusterName.DEFAULT); AtomicBoolean requestProcessed = new AtomicBoolean(); service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, (request, channel) -> { diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java index 46c3cdbe3aa..d2d88b87593 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; @@ -70,7 +71,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { nettyTransport = new 
NettyTransport(settings, threadPool, networkService, bigArrays, Version.CURRENT, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); nettyTransport.start(); - TransportService transportService = new TransportService(nettyTransport, threadPool); + TransportService transportService = new TransportService(nettyTransport, threadPool, ClusterName.DEFAULT); nettyTransport.transportServiceAdapter(transportService.createAdapter()); TransportAddress[] boundAddresses = nettyTransport.boundAddress().boundAddresses(); @@ -84,6 +85,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { public void terminateThreadPool() throws InterruptedException { nettyTransport.stop(); terminate(threadPool); + threadPool = null; } public void testThatTextMessageIsReturnedOnHTTPLikeRequest() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java new file mode 100644 index 00000000000..b376a55af73 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java @@ -0,0 +1,206 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.netty.NettyTransport; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.containsString; + +public class NettyTransportServiceHandshakeTests extends ESTestCase { + private static ThreadPool threadPool; + + @BeforeClass + public static void startThreadPool() { + threadPool = new ThreadPool(NettyTransportServiceHandshakeTests.class.getSimpleName()); + } + + private List transportServices = new ArrayList<>(); + + private NetworkHandle startServices(String nodeNameAndId, Settings settings, Version version, ClusterName clusterName) { + NettyTransport transport = + new NettyTransport( + settings, + threadPool, + new NetworkService(settings), + BigArrays.NON_RECYCLING_INSTANCE, + Version.CURRENT, + new NamedWriteableRegistry(), + new NoneCircuitBreakerService()); + TransportService transportService = new MockTransportService(settings, transport, threadPool, clusterName); + transportService.start(); + transportService.acceptIncomingRequests(); + DiscoveryNode node = + new DiscoveryNode( + nodeNameAndId, + nodeNameAndId, + transportService.boundAddress().publishAddress(), + emptyMap(), + emptySet(), + version); + transportService.setLocalNode(node); + transportServices.add(transportService); + return new NetworkHandle(transportService, node); + } + + @After + public void tearDown() throws Exception { + for (TransportService transportService : transportServices) { + transportService.close(); + } + super.tearDown(); + } + + @AfterClass + public static void terminateThreadPool() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + // since static must set to null to be eligible for collection + threadPool = null; + } + + public void testConnectToNodeLight() { + Settings settings = Settings.EMPTY; + + ClusterName test = new ClusterName("test"); + + NetworkHandle handleA = startServices("TS_A", settings, Version.CURRENT, test); + NetworkHandle handleB = + startServices( + "TS_B", + settings, + VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT), + test); + + DiscoveryNode connectedNode = + handleA.transportService.connectToNodeLight( + new DiscoveryNode( + "", + handleB.discoveryNode.getAddress(), + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()), + 100); + assertNotNull(connectedNode); + + // the name and version should be updated + assertEquals(connectedNode.getName(), "TS_B"); + assertEquals(connectedNode.getVersion(), handleB.discoveryNode.getVersion()); + } + + public void testMismatchedClusterName() { + Settings settings = Settings.EMPTY; + + 
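+        // same wire version but different cluster names ("a" vs "b"): the handshake must fail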
NetworkHandle handleA = startServices("TS_A", settings, Version.CURRENT, new ClusterName("a")); + NetworkHandle handleB = startServices("TS_B", settings, Version.CURRENT, new ClusterName("b")); + + try { + handleA.transportService.connectToNodeLight( + new DiscoveryNode( + "", + handleB.discoveryNode.getAddress(), + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()), + 100); + fail("expected handshake to fail from mismatched cluster names"); + } catch (ConnectTransportException e) { + assertThat(e.getMessage(), containsString("handshake failed, mismatched cluster name [Cluster [b]]")); + } + } + + public void testIncompatibleVersions() { + Settings settings = Settings.EMPTY; + + ClusterName test = new ClusterName("test"); + NetworkHandle handleA = startServices("TS_A", settings, Version.CURRENT, test); + NetworkHandle handleB = + startServices("TS_B", settings, VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()), test); + + try { + handleA.transportService.connectToNodeLight( + new DiscoveryNode( + "", + handleB.discoveryNode.getAddress(), + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()), + 100); + fail("expected handshake to fail from incompatible versions"); + } catch (ConnectTransportException e) { + assertThat(e.getMessage(), containsString("handshake failed, incompatible version")); + } + } + + public void testIgnoreMismatchedClusterName() { + Settings settings = Settings.EMPTY; + + NetworkHandle handleA = startServices("TS_A", settings, Version.CURRENT, new ClusterName("a")); + NetworkHandle handleB = + startServices( + "TS_B", + settings, + VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT), + new ClusterName("b") + ); + + DiscoveryNode connectedNode = handleA.transportService.connectToNodeLight( + new DiscoveryNode( + "", + handleB.discoveryNode.getAddress(), + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()), + 100, + false); + assertNotNull(connectedNode); + assertEquals(connectedNode.getName(), "TS_B"); + assertEquals(connectedNode.getVersion(), handleB.discoveryNode.getVersion()); + } + + private static class NetworkHandle { + private TransportService transportService; + private DiscoveryNode discoveryNode; + + public NetworkHandle(TransportService transportService, DiscoveryNode discoveryNode) { + this.transportService = transportService; + this.discoveryNode = discoveryNode; + } + } +} diff --git a/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java index 888a73c9386..4eb14014cfc 100644 --- a/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportModuleTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -43,8 +44,8 @@ public class TransportModuleTests extends ModuleTestCase { static class FakeTransportService extends TransportService { @Inject - public FakeTransportService(Settings settings, Transport transport, ThreadPool threadPool) { - super(settings, transport, threadPool); + public FakeTransportService(Settings settings, Transport transport, ThreadPool threadPool, ClusterName 
clusterName) { + super(settings, transport, threadPool, clusterName); } } } diff --git a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java index f071d56e3b9..2907a0a6ca8 100644 --- a/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.transport.local; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.transport.MockTransportService; @@ -28,8 +29,8 @@ import org.elasticsearch.transport.AbstractSimpleTransportTestCase; public class SimpleLocalTransportTests extends AbstractSimpleTransportTestCase { @Override - protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) { - MockTransportService transportService = MockTransportService.local(settings, version, threadPool); + protected MockTransportService build(Settings settings, Version version, ClusterName clusterName) { + MockTransportService transportService = MockTransportService.local(settings, version, threadPool, clusterName); transportService.start(); return transportService; } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java index 49f86b909a6..8c00ae01b74 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.transport.netty; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasables; @@ -63,14 +64,16 @@ public class NettyScheduledPingTests extends ESTestCase { NamedWriteableRegistry registryA = new NamedWriteableRegistry(); final NettyTransport nettyA = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, registryA, circuitBreakerService); - MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); + ClusterName test = new ClusterName("test"); + MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool, test); serviceA.start(); serviceA.acceptIncomingRequests(); NamedWriteableRegistry registryB = new NamedWriteableRegistry(); final NettyTransport nettyB = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, registryB, circuitBreakerService); - MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool); + MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool, test); + serviceB.start(); serviceB.acceptIncomingRequests(); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java index 7ba50b9c997..dac416128e5 100644 --- 
a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.transport.netty;

 import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
@@ -39,9 +40,9 @@ import static org.hamcrest.Matchers.containsString;
 public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase {

     @Override
-    protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) {
+    protected MockTransportService build(Settings settings, Version version, ClusterName clusterName) {
         settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build();
-        MockTransportService transportService = MockTransportService.nettyFromThreadPool(settings, version, threadPool);
+        MockTransportService transportService = MockTransportService.nettyFromThreadPool(settings, version, threadPool, clusterName);
         transportService.start();
         return transportService;
     }
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
index b3d01e62fae..c2bc877b903 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java
@@ -77,6 +77,7 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest;
 import org.elasticsearch.action.update.UpdateAction;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.action.update.UpdateResponse;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.network.NetworkModule;
@@ -725,8 +726,8 @@ public class IndicesRequestTests extends ESIntegTestCase {
         private final Map<String, List<TransportRequest>> requests = new HashMap<>();

         @Inject
-        public InterceptingTransportService(Settings settings, Transport transport, ThreadPool threadPool) {
-            super(settings, transport, threadPool);
+        public InterceptingTransportService(Settings settings, Transport transport, ThreadPool threadPool, ClusterName clusterName) {
+            super(settings, transport, threadPool, clusterName);
         }

         synchronized List<TransportRequest> consumeRequests(String action) {
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index d86efaa2a85..c4863680613 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -23,6 +23,7 @@ import com.amazonaws.services.ec2.model.Tag;
 import org.elasticsearch.Version;
 import org.elasticsearch.cloud.aws.AwsEc2Service;
 import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.LocalTransportAddress;
@@ -65,7 +66,7 @@ public class Ec2DiscoveryTests extends ESTestCase {

     @Before
     public void createTransportService() {
-        transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool);
+        transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool, ClusterName.DEFAULT);
     }

     protected List<DiscoveryNode> buildDynamicNodes(Settings nodeSettings, int nodes) {
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
index 3b265d6a067..4525b1ece1d 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.discovery.gce;

 import org.elasticsearch.Version;
 import org.elasticsearch.cloud.gce.GceComputeService;
+import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
@@ -93,7 +94,7 @@ public class GceDiscoveryTests extends ESTestCase {

     @Before
     public void createTransportService() {
-        transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool);
+        transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool, ClusterName.DEFAULT);
     }

     @Before
diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
index 8e85134763d..05b8b23e90c 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
@@ -20,6 +20,9 @@
 package org.elasticsearch.test.transport;

 import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.transport.TransportService;
+
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.component.LifecycleListener;
@@ -97,25 +100,29 @@ public class MockTransportService extends TransportService {
         }
     }

-    public static MockTransportService local(Settings settings, Version version, ThreadPool threadPool) {
+    public static MockTransportService local(Settings settings, Version version, ThreadPool threadPool, ClusterName clusterName) {
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
         Transport transport = new LocalTransport(settings, threadPool, version, namedWriteableRegistry, new NoneCircuitBreakerService());
-        return new MockTransportService(settings, transport, threadPool);
+        return new MockTransportService(settings, transport, threadPool, clusterName);
     }

-    public static MockTransportService nettyFromThreadPool(Settings settings, Version version, ThreadPool threadPool) {
+    public static MockTransportService nettyFromThreadPool(
+            Settings settings,
+            Version version,
+            ThreadPool threadPool,
+            ClusterName clusterName) {
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
         Transport transport = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE,
                 version, namedWriteableRegistry, new NoneCircuitBreakerService());
-        return new MockTransportService(Settings.EMPTY, transport,
threadPool); + return new MockTransportService(Settings.EMPTY, transport, threadPool, clusterName); } private final Transport original; @Inject - public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool) { - super(settings, new LookupTestTransport(transport), threadPool); + public MockTransportService(Settings settings, Transport transport, ThreadPool threadPool, ClusterName clusterName) { + super(settings, new LookupTestTransport(transport), threadPool, clusterName); this.original = transport; } From 8fc51380de681f15a0467c0c484897f6c2044fa2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 4 May 2016 17:29:23 -0700 Subject: [PATCH 0079/1311] Tests: improve logging for vagrant to emit entire output on failure This change makes the vagrant tasks extend LoggedExec, so that the entire vagrant output can be dumped on failure (and completely logged when using --info). It should help for debugging issues like #18122. --- .../elasticsearch/gradle/LoggedExec.groovy | 9 +- .../gradle/vagrant/BatsOverVagrantTask.groovy | 50 ++----- .../vagrant/TapLoggerOutputStream.groovy | 4 +- .../gradle/vagrant/VagrantCommandTask.groovy | 64 ++++---- .../vagrant/VagrantLoggerOutputStream.groovy | 138 +++++++++--------- qa/vagrant/build.gradle | 4 +- 6 files changed, 122 insertions(+), 147 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy index 1896cdf1b67..b1b04a2ded6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy @@ -26,14 +26,17 @@ import org.gradle.api.tasks.Exec * A wrapper around gradle's Exec task to capture output and log on error. */ class LoggedExec extends Exec { + + protected ByteArrayOutputStream output = new ByteArrayOutputStream() + LoggedExec() { if (logger.isInfoEnabled() == false) { - standardOutput = new ByteArrayOutputStream() - errorOutput = standardOutput + standardOutput = output + errorOutput = output ignoreExitValue = true doLast { if (execResult.exitValue != 0) { - standardOutput.toString('UTF-8').eachLine { line -> logger.error(line) } + output.toString('UTF-8').eachLine { line -> logger.error(line) } throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}") } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy index 2f2030f6cd2..c68e0528c9b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle.vagrant import org.gradle.api.DefaultTask +import org.gradle.api.tasks.Input import org.gradle.api.tasks.TaskAction import org.gradle.logging.ProgressLoggerFactory import org.gradle.process.internal.ExecAction @@ -30,41 +31,22 @@ import javax.inject.Inject * Runs bats over vagrant. Pretty much like running it using Exec but with a * nicer output formatter. 
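 *
 * A minimal usage sketch (the task name, box name, and bats invocation below
 * are hypothetical, shown only to illustrate the two inputs this task needs):
 *
 *   task batsPackagingTest(type: BatsOverVagrantTask) {
 *     boxName 'ubuntu-1404'
 *     command 'sudo bats --tap /vagrant/packaging/*.bats'
 *   }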
*/ -class BatsOverVagrantTask extends DefaultTask { - String command - String boxName - ExecAction execAction +public class BatsOverVagrantTask extends VagrantCommandTask { - BatsOverVagrantTask() { - execAction = getExecActionFactory().newExecAction() - } + @Input + String command - @Inject - ProgressLoggerFactory getProgressLoggerFactory() { - throw new UnsupportedOperationException(); - } + BatsOverVagrantTask() { + project.afterEvaluate { + args 'ssh', boxName, '--command', command + } + } - @Inject - ExecActionFactory getExecActionFactory() { - throw new UnsupportedOperationException(); - } - - void boxName(String boxName) { - this.boxName = boxName - } - - void command(String command) { - this.command = command - } - - @TaskAction - void exec() { - // It'd be nice if --machine-readable were, well, nice - execAction.commandLine(['vagrant', 'ssh', boxName, '--command', command]) - execAction.setStandardOutput(new TapLoggerOutputStream( - command: command, - factory: getProgressLoggerFactory(), - logger: logger)) - execAction.execute(); - } + @Override + protected OutputStream createLoggerOutputStream() { + return new TapLoggerOutputStream( + command: commandLine.join(' '), + factory: getProgressLoggerFactory(), + logger: logger) + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy index 5f4a5e0a0c4..6b87ceeaf96 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy @@ -47,9 +47,9 @@ class TapLoggerOutputStream extends LoggingOutputStream { TapLoggerOutputStream(Map args) { logger = args.logger progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) - progressLogger.setDescription("TAP output for $args.command") + progressLogger.setDescription("TAP output for `$args.command`") progressLogger.started() - progressLogger.progress("Starting $args.command...") + progressLogger.progress("Starting `$args.command`...") } void flush() { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy index 92b4a575eba..d79c2533fab 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -18,11 +18,10 @@ */ package org.elasticsearch.gradle.vagrant -import org.gradle.api.DefaultTask -import org.gradle.api.tasks.TaskAction +import org.apache.commons.io.output.TeeOutputStream +import org.elasticsearch.gradle.LoggedExec +import org.gradle.api.tasks.Input import org.gradle.logging.ProgressLoggerFactory -import org.gradle.process.internal.ExecAction -import org.gradle.process.internal.ExecActionFactory import javax.inject.Inject @@ -30,43 +29,30 @@ import javax.inject.Inject * Runs a vagrant command. Pretty much like Exec task but with a nicer output * formatter and defaults to `vagrant` as first part of commandLine. 
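 *
 * A minimal usage sketch, mirroring how qa/vagrant/build.gradle wires these
 * tasks up (the task and box names below are hypothetical):
 *
 *   task vagrantHalt(type: VagrantCommandTask) {
 *     boxName 'ubuntu-1404'
 *     args 'halt', 'ubuntu-1404'
 *   }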
*/ -class VagrantCommandTask extends DefaultTask { - List commandLine - String boxName - ExecAction execAction +public class VagrantCommandTask extends LoggedExec { - VagrantCommandTask() { - execAction = getExecActionFactory().newExecAction() - } + @Input + String boxName - @Inject - ProgressLoggerFactory getProgressLoggerFactory() { - throw new UnsupportedOperationException(); - } + public VagrantCommandTask() { + executable = 'vagrant' + project.afterEvaluate { + // It'd be nice if --machine-readable were, well, nice + standardOutput = new TeeOutputStream(standardOutput, createLoggerOutputStream()) + } + } - @Inject - ExecActionFactory getExecActionFactory() { - throw new UnsupportedOperationException(); - } + protected OutputStream createLoggerOutputStream() { + return new VagrantLoggerOutputStream( + command: commandLine.join(' '), + factory: getProgressLoggerFactory(), + /* Vagrant tends to output a lot of stuff, but most of the important + stuff starts with ==> $box */ + squashedPrefix: "==> $boxName: ") + } - void boxName(String boxName) { - this.boxName = boxName - } - - void commandLine(Object... commandLine) { - this.commandLine = commandLine - } - - @TaskAction - void exec() { - // It'd be nice if --machine-readable were, well, nice - execAction.commandLine(['vagrant'] + commandLine) - execAction.setStandardOutput(new VagrantLoggerOutputStream( - command: commandLine.join(' '), - factory: getProgressLoggerFactory(), - /* Vagrant tends to output a lot of stuff, but most of the important - stuff starts with ==> $box */ - squashedPrefix: "==> $boxName: ")) - execAction.execute(); - } + @Inject + ProgressLoggerFactory getProgressLoggerFactory() { + throw new UnsupportedOperationException(); + } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy index 488c4511b1f..22008fa1a0e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.vagrant import com.carrotsearch.gradle.junit4.LoggingOutputStream import org.gradle.logging.ProgressLogger +import org.gradle.logging.ProgressLoggerFactory /** * Adapts an OutputStream being written to by vagrant into a ProcessLogger. It @@ -42,79 +43,82 @@ import org.gradle.logging.ProgressLogger * to catch so it can render the output like * "Heading text > stdout from the provisioner". 
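 *
 * As an illustrative example, with squashedPrefix "==> precise32: " the two
 * raw vagrant lines
 *
 *   ==> precise32: ==> Installing elasticsearch...
 *   ==> precise32: tar: extracting archive
 *
 * produce the progress lines "Installing elasticsearch..." and
 * "Installing elasticsearch... > tar: extracting archive".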
*/ -class VagrantLoggerOutputStream extends LoggingOutputStream { - static final String HEADING_PREFIX = '==> ' +public class VagrantLoggerOutputStream extends LoggingOutputStream { + private static final String HEADING_PREFIX = '==> ' - ProgressLogger progressLogger - String squashedPrefix - String lastLine = '' - boolean inProgressReport = false - String heading = '' + ProgressLoggerFactory progressLoggerFactory - VagrantLoggerOutputStream(Map args) { - progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) - progressLogger.setDescription("Vagrant $args.command") - progressLogger.started() - progressLogger.progress("Starting vagrant $args.command...") - squashedPrefix = args.squashedPrefix - } - void flush() { - if (end == start) return - line(new String(buffer, start, end - start)) - start = end - } + private ProgressLogger progressLogger + String squashedPrefix + String lastLine = '' + boolean inProgressReport = false + String heading = '' - void line(String line) { - // debugPrintLine(line) // Uncomment me to log every incoming line - if (line.startsWith('\r\u001b')) { - /* We don't want to try to be a full terminal emulator but we want to - keep the escape sequences from leaking and catch _some_ of the - meaning. */ - line = line.substring(2) - if ('[K' == line) { - inProgressReport = true - } - return + VagrantLoggerOutputStream(Map args) { + progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) + progressLogger.setDescription("Vagrant output for `$args.command`") + progressLogger.started() + progressLogger.progress("Starting `$args.command`...") + squashedPrefix = args.squashedPrefix } - if (line.startsWith(squashedPrefix)) { - line = line.substring(squashedPrefix.length()) - inProgressReport = false - lastLine = line - if (line.startsWith(HEADING_PREFIX)) { - line = line.substring(HEADING_PREFIX.length()) - heading = line + ' > ' - } else { - line = heading + line - } - } else if (inProgressReport) { - inProgressReport = false - line = lastLine + line - } else { - return - } - // debugLogLine(line) // Uncomment me to log every line we add to the logger - progressLogger.progress(line) - } - void debugPrintLine(line) { - System.out.print '----------> ' - for (int i = start; i < end; i++) { - switch (buffer[i] as char) { - case ' '..'~': - System.out.print buffer[i] as char - break - default: - System.out.print '%' - System.out.print Integer.toHexString(buffer[i]) - } + void flush() { + if (end == start) return + line(new String(buffer, start, end - start)) + start = end } - System.out.print '\n' - } - void debugLogLine(line) { - System.out.print '>>>>>>>>>>> ' - System.out.print line - System.out.print '\n' - } + void line(String line) { + // debugPrintLine(line) // Uncomment me to log every incoming line + if (line.startsWith('\r\u001b')) { + /* We don't want to try to be a full terminal emulator but we want to + keep the escape sequences from leaking and catch _some_ of the + meaning. 
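+                In practice the sequence handled here is "\r\u001b[K" (carriage
+                return followed by the ANSI erase-in-line code), which vagrant
+                emits when redrawing progress output; the next line is then
+                treated as a progress report.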
*/ + line = line.substring(2) + if ('[K' == line) { + inProgressReport = true + } + return + } + if (line.startsWith(squashedPrefix)) { + line = line.substring(squashedPrefix.length()) + inProgressReport = false + lastLine = line + if (line.startsWith(HEADING_PREFIX)) { + line = line.substring(HEADING_PREFIX.length()) + heading = line + ' > ' + } else { + line = heading + line + } + } else if (inProgressReport) { + inProgressReport = false + line = lastLine + line + } else { + return + } + // debugLogLine(line) // Uncomment me to log every line we add to the logger + progressLogger.progress(line) + } + + void debugPrintLine(line) { + System.out.print '----------> ' + for (int i = start; i < end; i++) { + switch (buffer[i] as char) { + case ' '..'~': + System.out.print buffer[i] as char + break + default: + System.out.print '%' + System.out.print Integer.toHexString(buffer[i]) + } + } + System.out.print '\n' + } + + void debugLogLine(line) { + System.out.print '>>>>>>>>>>> ' + System.out.print line + System.out.print '\n' + } } diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 735a401e542..211f8ffa5d1 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -156,7 +156,7 @@ for (String box : availableBoxes) { // always add a halt task for all boxes, so clean makes sure they are all shutdown Task halt = tasks.create("vagrant${boxTask}#halt", VagrantCommandTask) { boxName box - commandLine 'halt', box + args 'halt', box } stop.dependsOn(halt) if (boxes.contains(box) == false) { @@ -176,7 +176,7 @@ for (String box : availableBoxes) { vagrant's default but its possible to change that default and folks do. But the boxes that we use are unlikely to work properly with other virtualization providers. Thus the lock. */ - commandLine 'up', box, '--provision', '--provider', 'virtualbox' + args 'up', box, '--provision', '--provider', 'virtualbox' /* It'd be possible to check if the box is already up here and output SKIPPED but that would require running vagrant status which is slow! 
*/ dependsOn checkVagrantVersion From 928e2b904db713d8d85e7bce0391f53f55928af9 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 5 May 2016 03:42:14 -0400 Subject: [PATCH 0080/1311] painless: optimize/simplify dynamic field and method access --- .../java/org/elasticsearch/painless/Def.java | 52 +++++-------------- 1 file changed, 12 insertions(+), 40 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 71c7de23c85..b8cf7fb6069 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -242,18 +242,16 @@ public class Def { } public static Method getMethod(final Object owner, final String name, final Definition definition) { - Struct struct = null; Class clazz = owner.getClass(); - Method method = null; while (clazz != null) { - struct = definition.classes.get(clazz); + Struct struct = definition.classes.get(clazz); if (struct != null) { - method = struct.methods.get(name); + Method method = struct.methods.get(name); if (method != null) { - break; + return method; } } @@ -261,45 +259,31 @@ public class Def { struct = definition.classes.get(iface); if (struct != null) { - method = struct.methods.get(name); + Method method = struct.methods.get(name); if (method != null) { - break; + return method; } } } - if (struct != null) { - method = struct.methods.get(name); - - if (method != null) { - break; - } - } - clazz = clazz.getSuperclass(); } - if (struct == null) { - throw new IllegalArgumentException("Unable to find a dynamic struct for class [" + owner.getClass() + "]."); - } - - return method; + return null; } public static Field getField(final Object owner, final String name, final Definition definition) { - Struct struct = null; Class clazz = owner.getClass(); - Field field = null; while (clazz != null) { - struct = definition.classes.get(clazz); + Struct struct = definition.classes.get(clazz); if (struct != null) { - field = struct.members.get(name); + Field field = struct.members.get(name); if (field != null) { - break; + return field; } } @@ -307,30 +291,18 @@ public class Def { struct = definition.classes.get(iface); if (struct != null) { - field = struct.members.get(name); + Field field = struct.members.get(name); if (field != null) { - break; + return field; } } } - if (struct != null) { - field = struct.members.get(name); - - if (field != null) { - break; - } - } - clazz = clazz.getSuperclass(); } - if (struct == null) { - throw new IllegalArgumentException("Unable to find a dynamic struct for class [" + owner.getClass() + "]."); - } - - return field; + return null; } public static Transform getTransform(Class fromClass, Class toClass, final Definition definition) { From 59c135b58d7c2fd788667790ca7cae48f2f49c28 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 5 May 2016 03:47:56 -0400 Subject: [PATCH 0081/1311] make internal Def methods private and add basic javadocs --- .../src/main/java/org/elasticsearch/painless/Def.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index b8cf7fb6069..4e170c93e49 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -241,7 +241,8 @@ 
public class Def { } } - public static Method getMethod(final Object owner, final String name, final Definition definition) { + /** Method lookup for owner.name(), returns null if no matching method was found */ + private static Method getMethod(final Object owner, final String name, final Definition definition) { Class clazz = owner.getClass(); while (clazz != null) { @@ -273,7 +274,8 @@ public class Def { return null; } - public static Field getField(final Object owner, final String name, final Definition definition) { + /** Field lookup for owner.name, returns null if no matching field was found */ + private static Field getField(final Object owner, final String name, final Definition definition) { Class clazz = owner.getClass(); while (clazz != null) { From e11b96ca9c26957374a1928c605845cc4f678f51 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 5 May 2016 10:36:21 -0400 Subject: [PATCH 0082/1311] Default to server VM and add client VM check Today we softly warn about running with the client VM. However, we should really refuse to start in production mode if running with the client VM as the performance of the client VM is too devastating for a server application. This commit adds an option to jvm.options to ensure that we are starting with the server VM (on all 32-bit non-Windows platforms on server-class machines (2+ CPUs, 2+ GB physical RAM) this is the default and on all 64-bit platforms this is the only option) and adds a bootstrap check for the client VM. Relates #18155 --- .../elasticsearch/bootstrap/Bootstrap.java | 6 ---- .../bootstrap/BootstrapCheck.java | 28 +++++++++++++++++++ .../bootstrap/BootstrapCheckTests.java | 22 +++++++++++++++ .../src/main/resources/config/jvm.options | 3 ++ 4 files changed, 53 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 1131a4a99ec..6d35cafd088 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -246,12 +246,6 @@ final class Bootstrap { PidFile.create(environment.pidFile(), true); } - // warn if running using the client VM - if (JvmInfo.jvmInfo().getVmName().toLowerCase(Locale.ROOT).contains("client")) { - ESLogger logger = Loggers.getLogger(Bootstrap.class); - logger.warn("jvm uses the client vm, make sure to run `java` with the server vm for best performance by adding `-server` to the command line"); - } - try { if (!foreground) { Loggers.disableConsoleLogging(); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java index 626e274076c..37a89531184 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -163,6 +163,7 @@ final class BootstrapCheck { if (Constants.LINUX) { checks.add(new MaxMapCountCheck()); } + checks.add(new ClientJvmCheck()); return Collections.unmodifiableList(checks); } @@ -476,4 +477,31 @@ final class BootstrapCheck { } + static class ClientJvmCheck implements BootstrapCheck.Check { + + @Override + public boolean check() { + return getVmName().toLowerCase(Locale.ROOT).contains("client"); + } + + // visible for testing + String getVmName() { + return JvmInfo.jvmInfo().getVmName(); + } + + @Override + public String errorMessage() { + return String.format( + Locale.ROOT, + "JVM is using the client VM [%s] but 
should be using a server VM for the best performance",
+                getVmName());
+        }
+
+        @Override
+        public final boolean isSystemCheck() {
+            return false;
+        }
+
+    }
+
 }
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
index 235957ac18b..d354adc7544 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
@@ -31,6 +31,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;

 import static org.hamcrest.CoreMatchers.allOf;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -373,6 +374,27 @@ public class BootstrapCheckTests extends ESTestCase {
         expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, false, defaultChecks, "testMinMasterNodes"));
     }

+    public void testClientJvmCheck() {
+        final AtomicReference<String> vmName = new AtomicReference<>("Java HotSpot(TM) 32-Bit Client VM");
+        final BootstrapCheck.Check check = new BootstrapCheck.ClientJvmCheck() {
+            @Override
+            String getVmName() {
+                return vmName.get();
+            }
+        };
+
+        RuntimeException e = expectThrows(
+                RuntimeException.class,
+                () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testClientJvmCheck"));
+        assertThat(
+                e.getMessage(),
+                containsString("JVM is using the client VM [Java HotSpot(TM) 32-Bit Client VM] " +
+                        "but should be using a server VM for the best performance"));
+
+        vmName.set("Java HotSpot(TM) 32-Bit Server VM");
+        BootstrapCheck.check(true, false, Collections.singletonList(check), "testClientJvmCheck");
+    }
+
     public void testIgnoringSystemChecks() {
         BootstrapCheck.Check check = new BootstrapCheck.Check() {
             @Override
diff --git a/distribution/src/main/resources/config/jvm.options b/distribution/src/main/resources/config/jvm.options
index 726f2772db6..b27b0a0a0f0 100644
--- a/distribution/src/main/resources/config/jvm.options
+++ b/distribution/src/main/resources/config/jvm.options
@@ -48,6 +48,9 @@

 ## basic

+# force the server VM
+-server
+
 # set to headless, just in case
 -Djava.awt.headless=true

From 2cae575f538af8cea473fa69788645c39c157307 Mon Sep 17 00:00:00 2001
From: Jack Conradson
Date: Thu, 5 May 2016 09:26:02 -0700
Subject: [PATCH 0083/1311] Added single-quoted strings.
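For example (illustrative): 'foo' now lexes to the same String constant as
"foo", and since the CHAR token is gone from the lexer, a character is written
as an explicitly cast one-character string, e.g. (char)'c'.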
Closes #18150 --- .../modules/scripting/painless.asciidoc | 14 +- .../src/main/antlr/PainlessLexer.g4 | 3 +- .../src/main/antlr/PainlessParser.g4 | 1 - .../org/elasticsearch/painless/Analyzer.java | 8 - .../painless/AnalyzerCaster.java | 15 +- .../painless/AnalyzerExpression.java | 12 - .../painless/AnalyzerExternal.java | 3 +- .../elasticsearch/painless/Definition.java | 9 + .../elasticsearch/painless/PainlessLexer.java | 342 ++++++------ .../painless/PainlessParser.java | 495 +++++++++--------- .../painless/PainlessParserBaseVisitor.java | 7 - .../painless/PainlessParserVisitor.java | 7 - .../org/elasticsearch/painless/Utility.java | 28 + .../org/elasticsearch/painless/Writer.java | 8 - .../painless/WriterExpression.java | 15 - .../painless/BasicExpressionTests.java | 2 +- .../elasticsearch/painless/EqualsTests.java | 44 +- .../elasticsearch/painless/StringTests.java | 114 ++++ .../test/plan_a/20_scriptfield.yaml | 2 +- .../rest-api-spec/test/plan_a/30_search.yaml | 12 +- 20 files changed, 614 insertions(+), 527 deletions(-) diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index 1937bd9947b..b96e1f099de 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -86,7 +86,7 @@ GET /hockey-stats/_search "script_score": { "script": { "lang": "painless", - "inline": "int total = 0; for (int i = 0; i < input.doc.goals.size(); ++i) { total += input.doc.goals[i]; } return total;" + "inline": "int total = 0; for (int i = 0; i < input.doc['goals'].size(); ++i) { total += input.doc['goals'][i]; } return total;" } } } @@ -108,7 +108,7 @@ GET /hockey-stats/_search "total_goals": { "script": { "lang": "painless", - "inline": "int total = 0; for (int i = 0; i < input.doc.goals.size(); ++i) { total += input.doc.goals[i]; } return total;" + "inline": "int total = 0; for (int i = 0; i < input.doc['goals'].size(); ++i) { total += input.doc['goals'][i]; } return total;" } } } @@ -118,7 +118,7 @@ GET /hockey-stats/_search You must always specify the index of the field value you want, even if there's only a single item in the field. All fields in Elasticsearch are multi-valued and Painless does not provide a `.value` shortcut. The following example uses a Painless script to sort the players by their combined first and last names. The names are accessed using -`input.doc.first.0` and `input.doc.last.0`. +`input.doc['first'].0` and `input.doc['last'].0`. 
[source,sh] ---------------------------------------------------------------- @@ -133,7 +133,7 @@ GET /hockey-stats/_search "order": "asc", "script": { "lang": "painless", - "inline": "input.doc.first.0 + \" \" + input.doc.last.0" + "inline": "input.doc['first'].0 + ' ' + input.doc['last'].0" } } } @@ -219,13 +219,13 @@ GET /hockey-stats/_search "full_name_dynamic": { "script": { "lang": "painless", - "inline": "def first = input.doc.first.0; def last = input.doc.last.0; return first + \" \" + last;" + "inline": "def first = input.doc['first'].0; def last = input.doc['last'].0; return first + ' ' + last;" } }, "full_name_static": { "script": { "lang": "painless", - "inline": "String first = (String)((List)((Map)input.get(\"doc\")).get(\"first\")).get(0); String last = (String)((List)((Map)input.get(\"doc\")).get(\"last\")).get(0); return first + \" \" + last;" + "inline": "String first = (String)((List)((Map)input.get('doc')).get('first')).get(0); String last = (String)((List)((Map)input.get('doc')).get('last')).get(0); return first + ' ' + last;" } } } @@ -727,4 +727,4 @@ Def static Long defToLong(def) static Float defToFloat(def) static Double defToDouble(def) ------ \ No newline at end of file +----- diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index f1e40f93d02..eab0670f973 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -90,8 +90,7 @@ HEX: '0' [xX] [0-9a-fA-F]+ [lL]?; INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?; DECIMAL: ( '0' | [1-9] [0-9]* ) DOT [0-9]* ( [eE] [+\-]? [0-9]+ )? [fF]?; -STRING: '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"'; -CHAR: '\'' . '\''; +STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' ) | ( '\'' ( '\\\'' | '\\\\' | ~[\\"] )*? 
'\'' ); TRUE: 'true'; FALSE: 'false'; diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index c9e0de72210..1ecd72528e0 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -89,7 +89,6 @@ generic expression : LP expression RP # precedence | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric - | CHAR # char | TRUE # true | FALSE # false | NULL # null diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java index 50473ea4e88..f411db00802 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.PainlessParser.BinaryContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.BreakContext; import org.elasticsearch.painless.PainlessParser.CastContext; -import org.elasticsearch.painless.PainlessParser.CharContext; import org.elasticsearch.painless.PainlessParser.CompContext; import org.elasticsearch.painless.PainlessParser.ConditionalContext; import org.elasticsearch.painless.PainlessParser.ContinueContext; @@ -276,13 +275,6 @@ class Analyzer extends PainlessParserBaseVisitor { return null; } - @Override - public Void visitChar(final CharContext ctx) { - expression.processChar(ctx); - - return null; - } - @Override public Void visitTrue(final TrueContext ctx) { expression.processTrue(ctx); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 46a510bc6bb..dddb3491f64 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -159,6 +159,7 @@ class AnalyzerCaster { return checkTransform(source, cast); case BYTE_OBJ: case SHORT_OBJ: + case STRING: if (explicit) return checkTransform(source, cast); @@ -371,6 +372,7 @@ class AnalyzerCaster { case SHORT: case BYTE_OBJ: case SHORT_OBJ: + case STRING: if (explicit) return checkTransform(source, cast); @@ -470,6 +472,15 @@ class AnalyzerCaster { } break; + case STRING: + switch (to.sort) { + case CHAR: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } } try { @@ -556,8 +567,8 @@ class AnalyzerCaster { } catch (IllegalAccessException | IllegalArgumentException | java.lang.reflect.InvocationTargetException | NullPointerException | ExceptionInInitializerError exception) { - throw new IllegalStateException(AnalyzerUtility.error(source) + "Unable to invoke transform to cast constant from " + - "[" + transform.from.name + "] to [" + transform.to.name + "]."); + throw new IllegalArgumentException(AnalyzerUtility.error(source) + + "Cannot cast constant from [" + transform.from.name + "] to [" + transform.to.name + "]."); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java index 029e7e530b5..3db42f88696 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExpression.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.PainlessParser.AssignmentContext; import org.elasticsearch.painless.PainlessParser.BinaryContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.CastContext; -import org.elasticsearch.painless.PainlessParser.CharContext; import org.elasticsearch.painless.PainlessParser.CompContext; import org.elasticsearch.painless.PainlessParser.ConditionalContext; import org.elasticsearch.painless.PainlessParser.DecltypeContext; @@ -161,17 +160,6 @@ class AnalyzerExpression { } } - void processChar(final CharContext ctx) { - final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); - - if (ctx.CHAR() == null) { - throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state."); - } - - charemd.preConst = ctx.CHAR().getText().charAt(1); - charemd.from = definition.charType; - } - void processTrue(final TrueContext ctx) { final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java index 2ce5fa14d22..32ab8f43c9e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerExternal.java @@ -584,8 +584,7 @@ class AnalyzerExternal { "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); } - if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.INT)) { + if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 6cb4125f026..450f2aa6fa4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -801,6 +801,7 @@ class Definition { addMethod("Utility", "charToLong", null, true, longobjType, new Type[] {charType}, null, null); addMethod("Utility", "charToFloat", null, true, floatobjType, new Type[] {charType}, null, null); addMethod("Utility", "charToDouble", null, true, doubleobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToString", null, true, stringType, new Type[] {charType}, null, null); addMethod("Utility", "CharacterToboolean", null, true, booleanType, new Type[] {charobjType}, null, null); addMethod("Utility", "CharacterTobyte", null, true, byteType, new Type[] {charobjType}, null, null); addMethod("Utility", "CharacterToshort", null, true, shortType, new Type[] {charobjType}, null, null); @@ -815,6 +816,7 @@ class Definition { addMethod("Utility", "CharacterToLong", null, true, longobjType, new Type[] {charobjType}, null, null); addMethod("Utility", "CharacterToFloat", null, true, floatobjType, new Type[] {charobjType}, null, null); addMethod("Utility", "CharacterToDouble", null, true, doubleobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", 
"CharacterToString", null, true, stringType, new Type[] {charobjType}, null, null); addMethod("Utility", "intToboolean", null, true, booleanType, new Type[] {intType}, null, null); addMethod("Utility", "intToByte", null, true, byteobjType, new Type[] {intType}, null, null); addMethod("Utility", "intToShort", null, true, shortobjType, new Type[] {intType}, null, null); @@ -851,6 +853,8 @@ class Definition { addMethod("Utility", "doubleToFloat", null, true, floatobjType, new Type[] {doubleType}, null, null); addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); + addMethod("Utility", "StringTochar", null, true, charType, new Type[] {stringType}, null, null); + addMethod("Utility", "StringToCharacter", null, true, charobjType, new Type[] {stringType}, null, null); addMethod("Math", "abs", null, true, doubleType, new Type[] {doubleType}, null, null); addMethod("Math", "fabs", "abs", true, floatType, new Type[] {floatType}, null, null); @@ -1125,6 +1129,7 @@ class Definition { addTransform(charType, longobjType, "Utility", "charToLong", true); addTransform(charType, floatobjType, "Utility", "charToFloat", true); addTransform(charType, doubleobjType, "Utility", "charToDouble", true); + addTransform(charType, stringType, "Utility", "charToString", true); addTransform(intType, booleanType, "Utility", "intToboolean", true); addTransform(intType, objectType, "Integer", "valueOf", true); @@ -1281,6 +1286,7 @@ class Definition { addTransform(charobjType, longobjType, "Utility", "CharacterToLong", true); addTransform(charobjType, floatobjType, "Utility", "CharacterToFloat", true); addTransform(charobjType, doubleobjType, "Utility", "CharacterToDouble", true); + addTransform(charobjType, stringType, "Utility", "CharacterToString", true); addTransform(intobjType, booleanType, "Utility", "IntegerToboolean", true); addTransform(intobjType, byteType, "Integer", "byteValue", false); @@ -1345,6 +1351,9 @@ class Definition { addTransform(doubleobjType, intobjType, "Utility", "NumberToInteger", true); addTransform(doubleobjType, longobjType, "Utility", "NumberToLong", true); addTransform(doubleobjType, floatobjType, "Utility", "NumberToFloat", true); + + addTransform(stringType, charType, "Utility", "StringTochar", true); + addTransform(stringType, charobjType, "Utility", "StringToCharacter", true); } private void addDefaultBounds() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java index ad71e155bb0..6951c67605a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java @@ -25,7 +25,7 @@ class PainlessLexer extends Lexer { BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, ALSH=60, ARSH=61, AUSH=62, OCTAL=63, HEX=64, INTEGER=65, DECIMAL=66, STRING=67, - CHAR=68, TRUE=69, FALSE=70, NULL=71, ID=72, EXTINTEGER=73, EXTID=74; + TRUE=68, FALSE=69, NULL=70, ID=71, EXTINTEGER=72, EXTID=73; public static final int EXT = 1; public static String[] modeNames = { "DEFAULT_MODE", "EXT" @@ -39,8 +39,8 @@ class PainlessLexer extends Lexer { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", "BOOLOR", "COND", 
"COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", - "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", - "ID", "EXTINTEGER", "EXTID" + "HEX", "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", + "EXTINTEGER", "EXTID" }; private static final String[] _LITERAL_NAMES = { @@ -51,7 +51,7 @@ class PainlessLexer extends Lexer { "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, - null, null, null, null, "'true'", "'false'", "'null'" + null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", @@ -61,8 +61,8 @@ class PainlessLexer extends Lexer { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", - "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", - "ID", "EXTINTEGER", "EXTID" + "HEX", "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", + "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -119,7 +119,7 @@ class PainlessLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2L\u01f4\b\1\b\1\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2K\u01fb\b\1\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -128,168 +128,172 @@ class PainlessLexer extends Lexer { "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ - "I\tI\4J\tJ\4K\tK\3\2\6\2\u009a\n\2\r\2\16\2\u009b\3\2\3\2\3\3\3\3\3\3"+ - "\3\3\7\3\u00a4\n\3\f\3\16\3\u00a7\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00ae\n"+ - "\3\f\3\16\3\u00b1\13\3\3\3\3\3\5\3\u00b5\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3"+ - "\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3"+ - "\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20"+ - "\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25"+ - "\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30"+ - "\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35"+ - "\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3#\3#\3$\3"+ - "$\3$\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3*\3+\3"+ - "+\3,\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\63"+ - "\3\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\3"+ - "8\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3=\3>\3>\3>\3>\3?\3"+ - "?\3?\3?\3?\3@\3@\6@\u017b\n@\r@\16@\u017c\3@\5@\u0180\n@\3A\3A\3A\6A\u0185"+ - "\nA\rA\16A\u0186\3A\5A\u018a\nA\3B\3B\3B\7B\u018f\nB\fB\16B\u0192\13B"+ - "\5B\u0194\nB\3B\5B\u0197\nB\3C\3C\3C\7C\u019c\nC\fC\16C\u019f\13C\5C\u01a1"+ - 
"\nC\3C\3C\7C\u01a5\nC\fC\16C\u01a8\13C\3C\3C\5C\u01ac\nC\3C\6C\u01af\n"+ - "C\rC\16C\u01b0\5C\u01b3\nC\3C\5C\u01b6\nC\3D\3D\3D\3D\3D\3D\7D\u01be\n"+ - "D\fD\16D\u01c1\13D\3D\3D\3E\3E\3E\3E\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3G"+ - "\3H\3H\3H\3H\3H\3I\3I\7I\u01db\nI\fI\16I\u01de\13I\3J\3J\3J\7J\u01e3\n"+ - "J\fJ\16J\u01e6\13J\5J\u01e8\nJ\3J\3J\3K\3K\7K\u01ee\nK\fK\16K\u01f1\13"+ - "K\3K\3K\5\u00a5\u00af\u01bf\2L\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13"+ - "\26\f\30\r\32\16\34\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62"+ - "\32\64\33\66\348\35:\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61"+ - "b\62d\63f\64h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086"+ - "D\u0088E\u008aF\u008cG\u008eH\u0090I\u0092J\u0094K\u0096L\4\2\3\21\5\2"+ - "\13\f\17\17\"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2"+ - "\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aa"+ - "c|\6\2\62;C\\aac|\u020b\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2"+ - "\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26"+ - "\3\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2"+ - "\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2"+ - "\2.\3\2\2\2\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2"+ - "\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2"+ - "\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R"+ - "\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3"+ - "\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2"+ - "\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2"+ - "x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2"+ - "\2\2\2\u0084\3\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2"+ - "\u008c\3\2\2\2\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\3\u0094"+ - "\3\2\2\2\3\u0096\3\2\2\2\4\u0099\3\2\2\2\6\u00b4\3\2\2\2\b\u00b8\3\2\2"+ - "\2\n\u00ba\3\2\2\2\f\u00bc\3\2\2\2\16\u00be\3\2\2\2\20\u00c0\3\2\2\2\22"+ - "\u00c2\3\2\2\2\24\u00c4\3\2\2\2\26\u00c8\3\2\2\2\30\u00ca\3\2\2\2\32\u00cc"+ - "\3\2\2\2\34\u00cf\3\2\2\2\36\u00d4\3\2\2\2 \u00da\3\2\2\2\"\u00dd\3\2"+ - "\2\2$\u00e1\3\2\2\2&\u00ea\3\2\2\2(\u00f0\3\2\2\2*\u00f7\3\2\2\2,\u00fb"+ - "\3\2\2\2.\u00ff\3\2\2\2\60\u0105\3\2\2\2\62\u010b\3\2\2\2\64\u010d\3\2"+ - "\2\2\66\u010f\3\2\2\28\u0111\3\2\2\2:\u0113\3\2\2\2<\u0115\3\2\2\2>\u0117"+ - "\3\2\2\2@\u0119\3\2\2\2B\u011c\3\2\2\2D\u011f\3\2\2\2F\u0123\3\2\2\2H"+ - "\u0125\3\2\2\2J\u0128\3\2\2\2L\u012a\3\2\2\2N\u012d\3\2\2\2P\u0130\3\2"+ - "\2\2R\u0134\3\2\2\2T\u0137\3\2\2\2V\u013b\3\2\2\2X\u013d\3\2\2\2Z\u013f"+ - "\3\2\2\2\\\u0141\3\2\2\2^\u0144\3\2\2\2`\u0147\3\2\2\2b\u0149\3\2\2\2"+ - "d\u014b\3\2\2\2f\u014e\3\2\2\2h\u0151\3\2\2\2j\u0153\3\2\2\2l\u0156\3"+ - "\2\2\2n\u0159\3\2\2\2p\u015c\3\2\2\2r\u015f\3\2\2\2t\u0162\3\2\2\2v\u0165"+ - "\3\2\2\2x\u0168\3\2\2\2z\u016b\3\2\2\2|\u016f\3\2\2\2~\u0173\3\2\2\2\u0080"+ - "\u0178\3\2\2\2\u0082\u0181\3\2\2\2\u0084\u0193\3\2\2\2\u0086\u01a0\3\2"+ - "\2\2\u0088\u01b7\3\2\2\2\u008a\u01c4\3\2\2\2\u008c\u01c8\3\2\2\2\u008e"+ - "\u01cd\3\2\2\2\u0090\u01d3\3\2\2\2\u0092\u01d8\3\2\2\2\u0094\u01e7\3\2"+ - "\2\2\u0096\u01eb\3\2\2\2\u0098\u009a\t\2\2\2\u0099\u0098\3\2\2\2\u009a"+ - "\u009b\3\2\2\2\u009b\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\3\2"+ - "\2\2\u009d\u009e\b\2\2\2\u009e\5\3\2\2\2\u009f\u00a0\7\61\2\2\u00a0\u00a1"+ - "\7\61\2\2\u00a1\u00a5\3\2\2\2\u00a2\u00a4\13\2\2\2\u00a3\u00a2\3\2\2\2"+ - 
"\u00a4\u00a7\3\2\2\2\u00a5\u00a6\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a8"+ - "\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a8\u00b5\t\3\2\2\u00a9\u00aa\7\61\2\2"+ - "\u00aa\u00ab\7,\2\2\u00ab\u00af\3\2\2\2\u00ac\u00ae\13\2\2\2\u00ad\u00ac"+ - "\3\2\2\2\u00ae\u00b1\3\2\2\2\u00af\u00b0\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0"+ - "\u00b2\3\2\2\2\u00b1\u00af\3\2\2\2\u00b2\u00b3\7,\2\2\u00b3\u00b5\7\61"+ - "\2\2\u00b4\u009f\3\2\2\2\u00b4\u00a9\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6"+ - "\u00b7\b\3\2\2\u00b7\7\3\2\2\2\u00b8\u00b9\7}\2\2\u00b9\t\3\2\2\2\u00ba"+ - "\u00bb\7\177\2\2\u00bb\13\3\2\2\2\u00bc\u00bd\7]\2\2\u00bd\r\3\2\2\2\u00be"+ - "\u00bf\7_\2\2\u00bf\17\3\2\2\2\u00c0\u00c1\7*\2\2\u00c1\21\3\2\2\2\u00c2"+ - "\u00c3\7+\2\2\u00c3\23\3\2\2\2\u00c4\u00c5\7\60\2\2\u00c5\u00c6\3\2\2"+ - "\2\u00c6\u00c7\b\n\3\2\u00c7\25\3\2\2\2\u00c8\u00c9\7.\2\2\u00c9\27\3"+ - "\2\2\2\u00ca\u00cb\7=\2\2\u00cb\31\3\2\2\2\u00cc\u00cd\7k\2\2\u00cd\u00ce"+ - "\7h\2\2\u00ce\33\3\2\2\2\u00cf\u00d0\7g\2\2\u00d0\u00d1\7n\2\2\u00d1\u00d2"+ - "\7u\2\2\u00d2\u00d3\7g\2\2\u00d3\35\3\2\2\2\u00d4\u00d5\7y\2\2\u00d5\u00d6"+ - "\7j\2\2\u00d6\u00d7\7k\2\2\u00d7\u00d8\7n\2\2\u00d8\u00d9\7g\2\2\u00d9"+ - "\37\3\2\2\2\u00da\u00db\7f\2\2\u00db\u00dc\7q\2\2\u00dc!\3\2\2\2\u00dd"+ - "\u00de\7h\2\2\u00de\u00df\7q\2\2\u00df\u00e0\7t\2\2\u00e0#\3\2\2\2\u00e1"+ - "\u00e2\7e\2\2\u00e2\u00e3\7q\2\2\u00e3\u00e4\7p\2\2\u00e4\u00e5\7v\2\2"+ - "\u00e5\u00e6\7k\2\2\u00e6\u00e7\7p\2\2\u00e7\u00e8\7w\2\2\u00e8\u00e9"+ - "\7g\2\2\u00e9%\3\2\2\2\u00ea\u00eb\7d\2\2\u00eb\u00ec\7t\2\2\u00ec\u00ed"+ - "\7g\2\2\u00ed\u00ee\7c\2\2\u00ee\u00ef\7m\2\2\u00ef\'\3\2\2\2\u00f0\u00f1"+ - "\7t\2\2\u00f1\u00f2\7g\2\2\u00f2\u00f3\7v\2\2\u00f3\u00f4\7w\2\2\u00f4"+ - "\u00f5\7t\2\2\u00f5\u00f6\7p\2\2\u00f6)\3\2\2\2\u00f7\u00f8\7p\2\2\u00f8"+ - "\u00f9\7g\2\2\u00f9\u00fa\7y\2\2\u00fa+\3\2\2\2\u00fb\u00fc\7v\2\2\u00fc"+ - "\u00fd\7t\2\2\u00fd\u00fe\7{\2\2\u00fe-\3\2\2\2\u00ff\u0100\7e\2\2\u0100"+ - "\u0101\7c\2\2\u0101\u0102\7v\2\2\u0102\u0103\7e\2\2\u0103\u0104\7j\2\2"+ - "\u0104/\3\2\2\2\u0105\u0106\7v\2\2\u0106\u0107\7j\2\2\u0107\u0108\7t\2"+ - "\2\u0108\u0109\7q\2\2\u0109\u010a\7y\2\2\u010a\61\3\2\2\2\u010b\u010c"+ - "\7#\2\2\u010c\63\3\2\2\2\u010d\u010e\7\u0080\2\2\u010e\65\3\2\2\2\u010f"+ - "\u0110\7,\2\2\u0110\67\3\2\2\2\u0111\u0112\7\61\2\2\u01129\3\2\2\2\u0113"+ - "\u0114\7\'\2\2\u0114;\3\2\2\2\u0115\u0116\7-\2\2\u0116=\3\2\2\2\u0117"+ - "\u0118\7/\2\2\u0118?\3\2\2\2\u0119\u011a\7>\2\2\u011a\u011b\7>\2\2\u011b"+ - "A\3\2\2\2\u011c\u011d\7@\2\2\u011d\u011e\7@\2\2\u011eC\3\2\2\2\u011f\u0120"+ - "\7@\2\2\u0120\u0121\7@\2\2\u0121\u0122\7@\2\2\u0122E\3\2\2\2\u0123\u0124"+ - "\7>\2\2\u0124G\3\2\2\2\u0125\u0126\7>\2\2\u0126\u0127\7?\2\2\u0127I\3"+ - "\2\2\2\u0128\u0129\7@\2\2\u0129K\3\2\2\2\u012a\u012b\7@\2\2\u012b\u012c"+ - "\7?\2\2\u012cM\3\2\2\2\u012d\u012e\7?\2\2\u012e\u012f\7?\2\2\u012fO\3"+ - "\2\2\2\u0130\u0131\7?\2\2\u0131\u0132\7?\2\2\u0132\u0133\7?\2\2\u0133"+ - "Q\3\2\2\2\u0134\u0135\7#\2\2\u0135\u0136\7?\2\2\u0136S\3\2\2\2\u0137\u0138"+ - "\7#\2\2\u0138\u0139\7?\2\2\u0139\u013a\7?\2\2\u013aU\3\2\2\2\u013b\u013c"+ - "\7(\2\2\u013cW\3\2\2\2\u013d\u013e\7`\2\2\u013eY\3\2\2\2\u013f\u0140\7"+ - "~\2\2\u0140[\3\2\2\2\u0141\u0142\7(\2\2\u0142\u0143\7(\2\2\u0143]\3\2"+ - "\2\2\u0144\u0145\7~\2\2\u0145\u0146\7~\2\2\u0146_\3\2\2\2\u0147\u0148"+ - "\7A\2\2\u0148a\3\2\2\2\u0149\u014a\7<\2\2\u014ac\3\2\2\2\u014b\u014c\7"+ - "-\2\2\u014c\u014d\7-\2\2\u014de\3\2\2\2\u014e\u014f\7/\2\2\u014f\u0150"+ - "\7/\2\2\u0150g\3\2\2\2\u0151\u0152\7?\2\2\u0152i\3\2\2\2\u0153\u0154\7"+ - 
"-\2\2\u0154\u0155\7?\2\2\u0155k\3\2\2\2\u0156\u0157\7/\2\2\u0157\u0158"+ - "\7?\2\2\u0158m\3\2\2\2\u0159\u015a\7,\2\2\u015a\u015b\7?\2\2\u015bo\3"+ - "\2\2\2\u015c\u015d\7\61\2\2\u015d\u015e\7?\2\2\u015eq\3\2\2\2\u015f\u0160"+ - "\7\'\2\2\u0160\u0161\7?\2\2\u0161s\3\2\2\2\u0162\u0163\7(\2\2\u0163\u0164"+ - "\7?\2\2\u0164u\3\2\2\2\u0165\u0166\7`\2\2\u0166\u0167\7?\2\2\u0167w\3"+ - "\2\2\2\u0168\u0169\7~\2\2\u0169\u016a\7?\2\2\u016ay\3\2\2\2\u016b\u016c"+ - "\7>\2\2\u016c\u016d\7>\2\2\u016d\u016e\7?\2\2\u016e{\3\2\2\2\u016f\u0170"+ - "\7@\2\2\u0170\u0171\7@\2\2\u0171\u0172\7?\2\2\u0172}\3\2\2\2\u0173\u0174"+ - "\7@\2\2\u0174\u0175\7@\2\2\u0175\u0176\7@\2\2\u0176\u0177\7?\2\2\u0177"+ - "\177\3\2\2\2\u0178\u017a\7\62\2\2\u0179\u017b\t\4\2\2\u017a\u0179\3\2"+ - "\2\2\u017b\u017c\3\2\2\2\u017c\u017a\3\2\2\2\u017c\u017d\3\2\2\2\u017d"+ - "\u017f\3\2\2\2\u017e\u0180\t\5\2\2\u017f\u017e\3\2\2\2\u017f\u0180\3\2"+ - "\2\2\u0180\u0081\3\2\2\2\u0181\u0182\7\62\2\2\u0182\u0184\t\6\2\2\u0183"+ - "\u0185\t\7\2\2\u0184\u0183\3\2\2\2\u0185\u0186\3\2\2\2\u0186\u0184\3\2"+ - "\2\2\u0186\u0187\3\2\2\2\u0187\u0189\3\2\2\2\u0188\u018a\t\5\2\2\u0189"+ - "\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0083\3\2\2\2\u018b\u0194\7\62"+ - "\2\2\u018c\u0190\t\b\2\2\u018d\u018f\t\t\2\2\u018e\u018d\3\2\2\2\u018f"+ - "\u0192\3\2\2\2\u0190\u018e\3\2\2\2\u0190\u0191\3\2\2\2\u0191\u0194\3\2"+ - "\2\2\u0192\u0190\3\2\2\2\u0193\u018b\3\2\2\2\u0193\u018c\3\2\2\2\u0194"+ - "\u0196\3\2\2\2\u0195\u0197\t\n\2\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2"+ - "\2\2\u0197\u0085\3\2\2\2\u0198\u01a1\7\62\2\2\u0199\u019d\t\b\2\2\u019a"+ - "\u019c\t\t\2\2\u019b\u019a\3\2\2\2\u019c\u019f\3\2\2\2\u019d\u019b\3\2"+ - "\2\2\u019d\u019e\3\2\2\2\u019e\u01a1\3\2\2\2\u019f\u019d\3\2\2\2\u01a0"+ - "\u0198\3\2\2\2\u01a0\u0199\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2\u01a6\5\24"+ - "\n\2\u01a3\u01a5\t\t\2\2\u01a4\u01a3\3\2\2\2\u01a5\u01a8\3\2\2\2\u01a6"+ - "\u01a4\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01b2\3\2\2\2\u01a8\u01a6\3\2"+ - "\2\2\u01a9\u01ab\t\13\2\2\u01aa\u01ac\t\f\2\2\u01ab\u01aa\3\2\2\2\u01ab"+ - "\u01ac\3\2\2\2\u01ac\u01ae\3\2\2\2\u01ad\u01af\t\t\2\2\u01ae\u01ad\3\2"+ - "\2\2\u01af\u01b0\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1"+ - "\u01b3\3\2\2\2\u01b2\u01a9\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b5\3\2"+ - "\2\2\u01b4\u01b6\t\r\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ - "\u0087\3\2\2\2\u01b7\u01bf\7$\2\2\u01b8\u01b9\7^\2\2\u01b9\u01be\7$\2"+ - "\2\u01ba\u01bb\7^\2\2\u01bb\u01be\7^\2\2\u01bc\u01be\n\16\2\2\u01bd\u01b8"+ - "\3\2\2\2\u01bd\u01ba\3\2\2\2\u01bd\u01bc\3\2\2\2\u01be\u01c1\3\2\2\2\u01bf"+ - "\u01c0\3\2\2\2\u01bf\u01bd\3\2\2\2\u01c0\u01c2\3\2\2\2\u01c1\u01bf\3\2"+ - "\2\2\u01c2\u01c3\7$\2\2\u01c3\u0089\3\2\2\2\u01c4\u01c5\7)\2\2\u01c5\u01c6"+ - "\13\2\2\2\u01c6\u01c7\7)\2\2\u01c7\u008b\3\2\2\2\u01c8\u01c9\7v\2\2\u01c9"+ - "\u01ca\7t\2\2\u01ca\u01cb\7w\2\2\u01cb\u01cc\7g\2\2\u01cc\u008d\3\2\2"+ - "\2\u01cd\u01ce\7h\2\2\u01ce\u01cf\7c\2\2\u01cf\u01d0\7n\2\2\u01d0\u01d1"+ - "\7u\2\2\u01d1\u01d2\7g\2\2\u01d2\u008f\3\2\2\2\u01d3\u01d4\7p\2\2\u01d4"+ - "\u01d5\7w\2\2\u01d5\u01d6\7n\2\2\u01d6\u01d7\7n\2\2\u01d7\u0091\3\2\2"+ - "\2\u01d8\u01dc\t\17\2\2\u01d9\u01db\t\20\2\2\u01da\u01d9\3\2\2\2\u01db"+ - "\u01de\3\2\2\2\u01dc\u01da\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u0093\3\2"+ - "\2\2\u01de\u01dc\3\2\2\2\u01df\u01e8\7\62\2\2\u01e0\u01e4\t\b\2\2\u01e1"+ - "\u01e3\t\t\2\2\u01e2\u01e1\3\2\2\2\u01e3\u01e6\3\2\2\2\u01e4\u01e2\3\2"+ - "\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e8\3\2\2\2\u01e6\u01e4\3\2\2\2\u01e7"+ - 
"\u01df\3\2\2\2\u01e7\u01e0\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01ea\bJ"+ - "\4\2\u01ea\u0095\3\2\2\2\u01eb\u01ef\t\17\2\2\u01ec\u01ee\t\20\2\2\u01ed"+ - "\u01ec\3\2\2\2\u01ee\u01f1\3\2\2\2\u01ef\u01ed\3\2\2\2\u01ef\u01f0\3\2"+ - "\2\2\u01f0\u01f2\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f2\u01f3\bK\4\2\u01f3"+ - "\u0097\3\2\2\2\34\2\3\u009b\u00a5\u00af\u00b4\u017c\u017f\u0186\u0189"+ - "\u0190\u0193\u0196\u019d\u01a0\u01a6\u01ab\u01b0\u01b2\u01b5\u01bd\u01bf"+ - "\u01dc\u01e4\u01e7\u01ef\5\b\2\2\4\3\2\4\2\2"; + "I\tI\4J\tJ\3\2\6\2\u0098\n\2\r\2\16\2\u0099\3\2\3\2\3\3\3\3\3\3\3\3\7"+ + "\3\u00a2\n\3\f\3\16\3\u00a5\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00ac\n\3\f\3"+ + "\16\3\u00af\13\3\3\3\3\3\5\3\u00b3\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6"+ + "\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\r"+ + "\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20"+ + "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23"+ + "\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25"+ + "\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30"+ + "\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35"+ + "\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3#\3#\3$\3$\3$"+ + "\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3*\3+\3+\3,"+ + "\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\63\3"+ + "\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\38"+ + "\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3=\3>\3>\3>\3>\3?\3?"+ + "\3?\3?\3?\3@\3@\6@\u0179\n@\r@\16@\u017a\3@\5@\u017e\n@\3A\3A\3A\6A\u0183"+ + "\nA\rA\16A\u0184\3A\5A\u0188\nA\3B\3B\3B\7B\u018d\nB\fB\16B\u0190\13B"+ + "\5B\u0192\nB\3B\5B\u0195\nB\3C\3C\3C\7C\u019a\nC\fC\16C\u019d\13C\5C\u019f"+ + "\nC\3C\3C\7C\u01a3\nC\fC\16C\u01a6\13C\3C\3C\5C\u01aa\nC\3C\6C\u01ad\n"+ + "C\rC\16C\u01ae\5C\u01b1\nC\3C\5C\u01b4\nC\3D\3D\3D\3D\3D\3D\7D\u01bc\n"+ + "D\fD\16D\u01bf\13D\3D\3D\3D\3D\3D\3D\3D\7D\u01c8\nD\fD\16D\u01cb\13D\3"+ + "D\5D\u01ce\nD\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3H\3H\7"+ + "H\u01e2\nH\fH\16H\u01e5\13H\3I\3I\3I\7I\u01ea\nI\fI\16I\u01ed\13I\5I\u01ef"+ + "\nI\3I\3I\3J\3J\7J\u01f5\nJ\fJ\16J\u01f8\13J\3J\3J\6\u00a3\u00ad\u01bd"+ + "\u01c9\2K\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34"+ + "\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35"+ + ":\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66"+ + "l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008c"+ + "G\u008eH\u0090I\u0092J\u0094K\4\2\3\21\5\2\13\f\17\17\"\"\4\2\f\f\17\17"+ + "\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4"+ + "\2GGgg\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aac|\6\2\62;C\\aac|\u0216\2\4\3\2"+ + "\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20"+ + "\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2"+ + "\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3"+ + "\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3"+ + "\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3"+ + "\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2"+ + "\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2"+ + "X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3"+ + "\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2"+ + "\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2"+ + 
"~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2"+ + "\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2"+ + "\u0090\3\2\2\2\3\u0092\3\2\2\2\3\u0094\3\2\2\2\4\u0097\3\2\2\2\6\u00b2"+ + "\3\2\2\2\b\u00b6\3\2\2\2\n\u00b8\3\2\2\2\f\u00ba\3\2\2\2\16\u00bc\3\2"+ + "\2\2\20\u00be\3\2\2\2\22\u00c0\3\2\2\2\24\u00c2\3\2\2\2\26\u00c6\3\2\2"+ + "\2\30\u00c8\3\2\2\2\32\u00ca\3\2\2\2\34\u00cd\3\2\2\2\36\u00d2\3\2\2\2"+ + " \u00d8\3\2\2\2\"\u00db\3\2\2\2$\u00df\3\2\2\2&\u00e8\3\2\2\2(\u00ee\3"+ + "\2\2\2*\u00f5\3\2\2\2,\u00f9\3\2\2\2.\u00fd\3\2\2\2\60\u0103\3\2\2\2\62"+ + "\u0109\3\2\2\2\64\u010b\3\2\2\2\66\u010d\3\2\2\28\u010f\3\2\2\2:\u0111"+ + "\3\2\2\2<\u0113\3\2\2\2>\u0115\3\2\2\2@\u0117\3\2\2\2B\u011a\3\2\2\2D"+ + "\u011d\3\2\2\2F\u0121\3\2\2\2H\u0123\3\2\2\2J\u0126\3\2\2\2L\u0128\3\2"+ + "\2\2N\u012b\3\2\2\2P\u012e\3\2\2\2R\u0132\3\2\2\2T\u0135\3\2\2\2V\u0139"+ + "\3\2\2\2X\u013b\3\2\2\2Z\u013d\3\2\2\2\\\u013f\3\2\2\2^\u0142\3\2\2\2"+ + "`\u0145\3\2\2\2b\u0147\3\2\2\2d\u0149\3\2\2\2f\u014c\3\2\2\2h\u014f\3"+ + "\2\2\2j\u0151\3\2\2\2l\u0154\3\2\2\2n\u0157\3\2\2\2p\u015a\3\2\2\2r\u015d"+ + "\3\2\2\2t\u0160\3\2\2\2v\u0163\3\2\2\2x\u0166\3\2\2\2z\u0169\3\2\2\2|"+ + "\u016d\3\2\2\2~\u0171\3\2\2\2\u0080\u0176\3\2\2\2\u0082\u017f\3\2\2\2"+ + "\u0084\u0191\3\2\2\2\u0086\u019e\3\2\2\2\u0088\u01cd\3\2\2\2\u008a\u01cf"+ + "\3\2\2\2\u008c\u01d4\3\2\2\2\u008e\u01da\3\2\2\2\u0090\u01df\3\2\2\2\u0092"+ + "\u01ee\3\2\2\2\u0094\u01f2\3\2\2\2\u0096\u0098\t\2\2\2\u0097\u0096\3\2"+ + "\2\2\u0098\u0099\3\2\2\2\u0099\u0097\3\2\2\2\u0099\u009a\3\2\2\2\u009a"+ + "\u009b\3\2\2\2\u009b\u009c\b\2\2\2\u009c\5\3\2\2\2\u009d\u009e\7\61\2"+ + "\2\u009e\u009f\7\61\2\2\u009f\u00a3\3\2\2\2\u00a0\u00a2\13\2\2\2\u00a1"+ + "\u00a0\3\2\2\2\u00a2\u00a5\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a3\u00a1\3\2"+ + "\2\2\u00a4\u00a6\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00b3\t\3\2\2\u00a7"+ + "\u00a8\7\61\2\2\u00a8\u00a9\7,\2\2\u00a9\u00ad\3\2\2\2\u00aa\u00ac\13"+ + "\2\2\2\u00ab\u00aa\3\2\2\2\u00ac\u00af\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ad"+ + "\u00ab\3\2\2\2\u00ae\u00b0\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\u00b1\7,"+ + "\2\2\u00b1\u00b3\7\61\2\2\u00b2\u009d\3\2\2\2\u00b2\u00a7\3\2\2\2\u00b3"+ + "\u00b4\3\2\2\2\u00b4\u00b5\b\3\2\2\u00b5\7\3\2\2\2\u00b6\u00b7\7}\2\2"+ + "\u00b7\t\3\2\2\2\u00b8\u00b9\7\177\2\2\u00b9\13\3\2\2\2\u00ba\u00bb\7"+ + "]\2\2\u00bb\r\3\2\2\2\u00bc\u00bd\7_\2\2\u00bd\17\3\2\2\2\u00be\u00bf"+ + "\7*\2\2\u00bf\21\3\2\2\2\u00c0\u00c1\7+\2\2\u00c1\23\3\2\2\2\u00c2\u00c3"+ + "\7\60\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c5\b\n\3\2\u00c5\25\3\2\2\2\u00c6"+ + "\u00c7\7.\2\2\u00c7\27\3\2\2\2\u00c8\u00c9\7=\2\2\u00c9\31\3\2\2\2\u00ca"+ + "\u00cb\7k\2\2\u00cb\u00cc\7h\2\2\u00cc\33\3\2\2\2\u00cd\u00ce\7g\2\2\u00ce"+ + "\u00cf\7n\2\2\u00cf\u00d0\7u\2\2\u00d0\u00d1\7g\2\2\u00d1\35\3\2\2\2\u00d2"+ + "\u00d3\7y\2\2\u00d3\u00d4\7j\2\2\u00d4\u00d5\7k\2\2\u00d5\u00d6\7n\2\2"+ + "\u00d6\u00d7\7g\2\2\u00d7\37\3\2\2\2\u00d8\u00d9\7f\2\2\u00d9\u00da\7"+ + "q\2\2\u00da!\3\2\2\2\u00db\u00dc\7h\2\2\u00dc\u00dd\7q\2\2\u00dd\u00de"+ + "\7t\2\2\u00de#\3\2\2\2\u00df\u00e0\7e\2\2\u00e0\u00e1\7q\2\2\u00e1\u00e2"+ + "\7p\2\2\u00e2\u00e3\7v\2\2\u00e3\u00e4\7k\2\2\u00e4\u00e5\7p\2\2\u00e5"+ + "\u00e6\7w\2\2\u00e6\u00e7\7g\2\2\u00e7%\3\2\2\2\u00e8\u00e9\7d\2\2\u00e9"+ + "\u00ea\7t\2\2\u00ea\u00eb\7g\2\2\u00eb\u00ec\7c\2\2\u00ec\u00ed\7m\2\2"+ + "\u00ed\'\3\2\2\2\u00ee\u00ef\7t\2\2\u00ef\u00f0\7g\2\2\u00f0\u00f1\7v"+ + "\2\2\u00f1\u00f2\7w\2\2\u00f2\u00f3\7t\2\2\u00f3\u00f4\7p\2\2\u00f4)\3"+ + 
"\2\2\2\u00f5\u00f6\7p\2\2\u00f6\u00f7\7g\2\2\u00f7\u00f8\7y\2\2\u00f8"+ + "+\3\2\2\2\u00f9\u00fa\7v\2\2\u00fa\u00fb\7t\2\2\u00fb\u00fc\7{\2\2\u00fc"+ + "-\3\2\2\2\u00fd\u00fe\7e\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100"+ + "\u0101\7e\2\2\u0101\u0102\7j\2\2\u0102/\3\2\2\2\u0103\u0104\7v\2\2\u0104"+ + "\u0105\7j\2\2\u0105\u0106\7t\2\2\u0106\u0107\7q\2\2\u0107\u0108\7y\2\2"+ + "\u0108\61\3\2\2\2\u0109\u010a\7#\2\2\u010a\63\3\2\2\2\u010b\u010c\7\u0080"+ + "\2\2\u010c\65\3\2\2\2\u010d\u010e\7,\2\2\u010e\67\3\2\2\2\u010f\u0110"+ + "\7\61\2\2\u01109\3\2\2\2\u0111\u0112\7\'\2\2\u0112;\3\2\2\2\u0113\u0114"+ + "\7-\2\2\u0114=\3\2\2\2\u0115\u0116\7/\2\2\u0116?\3\2\2\2\u0117\u0118\7"+ + ">\2\2\u0118\u0119\7>\2\2\u0119A\3\2\2\2\u011a\u011b\7@\2\2\u011b\u011c"+ + "\7@\2\2\u011cC\3\2\2\2\u011d\u011e\7@\2\2\u011e\u011f\7@\2\2\u011f\u0120"+ + "\7@\2\2\u0120E\3\2\2\2\u0121\u0122\7>\2\2\u0122G\3\2\2\2\u0123\u0124\7"+ + ">\2\2\u0124\u0125\7?\2\2\u0125I\3\2\2\2\u0126\u0127\7@\2\2\u0127K\3\2"+ + "\2\2\u0128\u0129\7@\2\2\u0129\u012a\7?\2\2\u012aM\3\2\2\2\u012b\u012c"+ + "\7?\2\2\u012c\u012d\7?\2\2\u012dO\3\2\2\2\u012e\u012f\7?\2\2\u012f\u0130"+ + "\7?\2\2\u0130\u0131\7?\2\2\u0131Q\3\2\2\2\u0132\u0133\7#\2\2\u0133\u0134"+ + "\7?\2\2\u0134S\3\2\2\2\u0135\u0136\7#\2\2\u0136\u0137\7?\2\2\u0137\u0138"+ + "\7?\2\2\u0138U\3\2\2\2\u0139\u013a\7(\2\2\u013aW\3\2\2\2\u013b\u013c\7"+ + "`\2\2\u013cY\3\2\2\2\u013d\u013e\7~\2\2\u013e[\3\2\2\2\u013f\u0140\7("+ + "\2\2\u0140\u0141\7(\2\2\u0141]\3\2\2\2\u0142\u0143\7~\2\2\u0143\u0144"+ + "\7~\2\2\u0144_\3\2\2\2\u0145\u0146\7A\2\2\u0146a\3\2\2\2\u0147\u0148\7"+ + "<\2\2\u0148c\3\2\2\2\u0149\u014a\7-\2\2\u014a\u014b\7-\2\2\u014be\3\2"+ + "\2\2\u014c\u014d\7/\2\2\u014d\u014e\7/\2\2\u014eg\3\2\2\2\u014f\u0150"+ + "\7?\2\2\u0150i\3\2\2\2\u0151\u0152\7-\2\2\u0152\u0153\7?\2\2\u0153k\3"+ + "\2\2\2\u0154\u0155\7/\2\2\u0155\u0156\7?\2\2\u0156m\3\2\2\2\u0157\u0158"+ + "\7,\2\2\u0158\u0159\7?\2\2\u0159o\3\2\2\2\u015a\u015b\7\61\2\2\u015b\u015c"+ + "\7?\2\2\u015cq\3\2\2\2\u015d\u015e\7\'\2\2\u015e\u015f\7?\2\2\u015fs\3"+ + "\2\2\2\u0160\u0161\7(\2\2\u0161\u0162\7?\2\2\u0162u\3\2\2\2\u0163\u0164"+ + "\7`\2\2\u0164\u0165\7?\2\2\u0165w\3\2\2\2\u0166\u0167\7~\2\2\u0167\u0168"+ + "\7?\2\2\u0168y\3\2\2\2\u0169\u016a\7>\2\2\u016a\u016b\7>\2\2\u016b\u016c"+ + "\7?\2\2\u016c{\3\2\2\2\u016d\u016e\7@\2\2\u016e\u016f\7@\2\2\u016f\u0170"+ + "\7?\2\2\u0170}\3\2\2\2\u0171\u0172\7@\2\2\u0172\u0173\7@\2\2\u0173\u0174"+ + "\7@\2\2\u0174\u0175\7?\2\2\u0175\177\3\2\2\2\u0176\u0178\7\62\2\2\u0177"+ + "\u0179\t\4\2\2\u0178\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u0178\3\2"+ + "\2\2\u017a\u017b\3\2\2\2\u017b\u017d\3\2\2\2\u017c\u017e\t\5\2\2\u017d"+ + "\u017c\3\2\2\2\u017d\u017e\3\2\2\2\u017e\u0081\3\2\2\2\u017f\u0180\7\62"+ + "\2\2\u0180\u0182\t\6\2\2\u0181\u0183\t\7\2\2\u0182\u0181\3\2\2\2\u0183"+ + "\u0184\3\2\2\2\u0184\u0182\3\2\2\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2"+ + "\2\2\u0186\u0188\t\5\2\2\u0187\u0186\3\2\2\2\u0187\u0188\3\2\2\2\u0188"+ + "\u0083\3\2\2\2\u0189\u0192\7\62\2\2\u018a\u018e\t\b\2\2\u018b\u018d\t"+ + "\t\2\2\u018c\u018b\3\2\2\2\u018d\u0190\3\2\2\2\u018e\u018c\3\2\2\2\u018e"+ + "\u018f\3\2\2\2\u018f\u0192\3\2\2\2\u0190\u018e\3\2\2\2\u0191\u0189\3\2"+ + "\2\2\u0191\u018a\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u0195\t\n\2\2\u0194"+ + "\u0193\3\2\2\2\u0194\u0195\3\2\2\2\u0195\u0085\3\2\2\2\u0196\u019f\7\62"+ + "\2\2\u0197\u019b\t\b\2\2\u0198\u019a\t\t\2\2\u0199\u0198\3\2\2\2\u019a"+ + "\u019d\3\2\2\2\u019b\u0199\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019f\3\2"+ + 
"\2\2\u019d\u019b\3\2\2\2\u019e\u0196\3\2\2\2\u019e\u0197\3\2\2\2\u019f"+ + "\u01a0\3\2\2\2\u01a0\u01a4\5\24\n\2\u01a1\u01a3\t\t\2\2\u01a2\u01a1\3"+ + "\2\2\2\u01a3\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5"+ + "\u01b0\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01a9\t\13\2\2\u01a8\u01aa\t"+ + "\f\2\2\u01a9\u01a8\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa\u01ac\3\2\2\2\u01ab"+ + "\u01ad\t\t\2\2\u01ac\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01ac\3\2"+ + "\2\2\u01ae\u01af\3\2\2\2\u01af\u01b1\3\2\2\2\u01b0\u01a7\3\2\2\2\u01b0"+ + "\u01b1\3\2\2\2\u01b1\u01b3\3\2\2\2\u01b2\u01b4\t\r\2\2\u01b3\u01b2\3\2"+ + "\2\2\u01b3\u01b4\3\2\2\2\u01b4\u0087\3\2\2\2\u01b5\u01bd\7$\2\2\u01b6"+ + "\u01b7\7^\2\2\u01b7\u01bc\7$\2\2\u01b8\u01b9\7^\2\2\u01b9\u01bc\7^\2\2"+ + "\u01ba\u01bc\n\16\2\2\u01bb\u01b6\3\2\2\2\u01bb\u01b8\3\2\2\2\u01bb\u01ba"+ + "\3\2\2\2\u01bc\u01bf\3\2\2\2\u01bd\u01be\3\2\2\2\u01bd\u01bb\3\2\2\2\u01be"+ + "\u01c0\3\2\2\2\u01bf\u01bd\3\2\2\2\u01c0\u01ce\7$\2\2\u01c1\u01c9\7)\2"+ + "\2\u01c2\u01c3\7^\2\2\u01c3\u01c8\7)\2\2\u01c4\u01c5\7^\2\2\u01c5\u01c8"+ + "\7^\2\2\u01c6\u01c8\n\16\2\2\u01c7\u01c2\3\2\2\2\u01c7\u01c4\3\2\2\2\u01c7"+ + "\u01c6\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9\u01ca\3\2\2\2\u01c9\u01c7\3\2"+ + "\2\2\u01ca\u01cc\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01ce\7)\2\2\u01cd"+ + "\u01b5\3\2\2\2\u01cd\u01c1\3\2\2\2\u01ce\u0089\3\2\2\2\u01cf\u01d0\7v"+ + "\2\2\u01d0\u01d1\7t\2\2\u01d1\u01d2\7w\2\2\u01d2\u01d3\7g\2\2\u01d3\u008b"+ + "\3\2\2\2\u01d4\u01d5\7h\2\2\u01d5\u01d6\7c\2\2\u01d6\u01d7\7n\2\2\u01d7"+ + "\u01d8\7u\2\2\u01d8\u01d9\7g\2\2\u01d9\u008d\3\2\2\2\u01da\u01db\7p\2"+ + "\2\u01db\u01dc\7w\2\2\u01dc\u01dd\7n\2\2\u01dd\u01de\7n\2\2\u01de\u008f"+ + "\3\2\2\2\u01df\u01e3\t\17\2\2\u01e0\u01e2\t\20\2\2\u01e1\u01e0\3\2\2\2"+ + "\u01e2\u01e5\3\2\2\2\u01e3\u01e1\3\2\2\2\u01e3\u01e4\3\2\2\2\u01e4\u0091"+ + "\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e6\u01ef\7\62\2\2\u01e7\u01eb\t\b\2\2"+ + "\u01e8\u01ea\t\t\2\2\u01e9\u01e8\3\2\2\2\u01ea\u01ed\3\2\2\2\u01eb\u01e9"+ + "\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ef\3\2\2\2\u01ed\u01eb\3\2\2\2\u01ee"+ + "\u01e6\3\2\2\2\u01ee\u01e7\3\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\bI"+ + "\4\2\u01f1\u0093\3\2\2\2\u01f2\u01f6\t\17\2\2\u01f3\u01f5\t\20\2\2\u01f4"+ + "\u01f3\3\2\2\2\u01f5\u01f8\3\2\2\2\u01f6\u01f4\3\2\2\2\u01f6\u01f7\3\2"+ + "\2\2\u01f7\u01f9\3\2\2\2\u01f8\u01f6\3\2\2\2\u01f9\u01fa\bJ\4\2\u01fa"+ + "\u0095\3\2\2\2\37\2\3\u0099\u00a3\u00ad\u00b2\u017a\u017d\u0184\u0187"+ + "\u018e\u0191\u0194\u019b\u019e\u01a4\u01a9\u01ae\u01b0\u01b3\u01bb\u01bd"+ + "\u01c7\u01c9\u01cd\u01e3\u01eb\u01ee\u01f6\5\b\2\2\4\3\2\4\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java index c4499fca638..b92c9d75198 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java @@ -25,7 +25,7 @@ class PainlessParser extends Parser { BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, ALSH=60, ARSH=61, AUSH=62, OCTAL=63, HEX=64, INTEGER=65, DECIMAL=66, STRING=67, - CHAR=68, TRUE=69, FALSE=70, NULL=71, ID=72, EXTINTEGER=73, EXTID=74; + TRUE=68, FALSE=69, NULL=70, ID=71, EXTINTEGER=72, EXTID=73; public static final int RULE_source 
= 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_emptyscope = 4, RULE_initializer = 5, RULE_afterthought = 6, RULE_declaration = 7, RULE_decltype = 8, @@ -50,7 +50,7 @@ class PainlessParser extends Parser { "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, - null, null, null, null, "'true'", "'false'", "'null'" + null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", @@ -60,8 +60,8 @@ class PainlessParser extends Parser { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", - "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", "NULL", - "ID", "EXTINTEGER", "EXTID" + "HEX", "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", + "EXTINTEGER", "EXTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -151,7 +151,7 @@ class PainlessParser extends Parser { setState(55); _errHandler.sync(this); _la = _input.LA(1); - } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); + } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); setState(57); match(EOF); } @@ -458,7 +458,7 @@ class PainlessParser extends Parser { match(LP); setState(88); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << 
(ID - 64)))) != 0)) { { setState(87); initializer(); @@ -469,7 +469,7 @@ class PainlessParser extends Parser { match(SEMICOLON); setState(92); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { { setState(91); expression(0); @@ -480,7 +480,7 @@ class PainlessParser extends Parser { match(SEMICOLON); setState(96); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { { setState(95); afterthought(); @@ -720,7 +720,7 @@ class PainlessParser extends Parser { setState(142); _errHandler.sync(this); _la = _input.LA(1); - } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); + } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); setState(144); match(RBRACK); } @@ -747,7 +747,6 @@ class PainlessParser extends Parser { case INTEGER: case DECIMAL: case 
STRING: - case CHAR: case TRUE: case FALSE: case NULL: @@ -1541,15 +1540,6 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class CharContext extends ExpressionContext { - public TerminalNode CHAR() { return getToken(PainlessParser.CHAR, 0); } - public CharContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitChar(this); - else return visitor.visitChildren(this); - } - } public static class TrueContext extends ExpressionContext { public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } public TrueContext(ExpressionContext ctx) { copyFrom(ctx); } @@ -1576,7 +1566,7 @@ class PainlessParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(237); + setState(236); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { @@ -1657,74 +1647,65 @@ class PainlessParser extends Parser { break; case 6: { - _localctx = new CharContext(_localctx); + _localctx = new TrueContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(226); - match(CHAR); + match(TRUE); } break; case 7: { - _localctx = new TrueContext(_localctx); + _localctx = new FalseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(227); - match(TRUE); + match(FALSE); } break; case 8: { - _localctx = new FalseContext(_localctx); + _localctx = new NullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(228); - match(FALSE); + match(NULL); } break; case 9: { - _localctx = new NullContext(_localctx); + _localctx = new PostincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(229); - match(NULL); + extstart(); + setState(230); + increment(); } break; case 10: { - _localctx = new PostincContext(_localctx); + _localctx = new PreincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(230); - extstart(); - setState(231); + setState(232); increment(); + setState(233); + extstart(); } break; case 11: - { - _localctx = new PreincContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(233); - increment(); - setState(234); - extstart(); - } - break; - case 12: { _localctx = new ExternalContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(236); + setState(235); extstart(); } break; } _ctx.stop = _input.LT(-1); - setState(277); + setState(276); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1732,22 +1713,22 @@ class PainlessParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(275); + setState(274); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(239); + setState(238); if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(240); + setState(239); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(241); + setState(240); expression(13); } break; @@ -1755,16 +1736,16 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new 
ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(242); + setState(241); if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(243); + setState(242); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(244); + setState(243); expression(12); } break; @@ -1772,16 +1753,16 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(245); + setState(244); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(246); + setState(245); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(247); + setState(246); expression(11); } break; @@ -1789,16 +1770,16 @@ class PainlessParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(248); + setState(247); if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(249); + setState(248); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(250); + setState(249); expression(10); } break; @@ -1806,16 +1787,16 @@ class PainlessParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(251); + setState(250); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(252); + setState(251); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(253); + setState(252); expression(9); } break; @@ -1823,11 +1804,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(254); + setState(253); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(255); + setState(254); match(BWAND); - setState(256); + setState(255); expression(8); } break; @@ -1835,11 +1816,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(257); + setState(256); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(258); + setState(257); match(BWXOR); - setState(259); + setState(258); expression(7); } break; @@ -1847,11 +1828,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(260); + setState(259); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(261); + setState(260); match(BWOR); - setState(262); + 
setState(261); expression(6); } break; @@ -1859,11 +1840,11 @@ class PainlessParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(263); + setState(262); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(264); + setState(263); match(BOOLAND); - setState(265); + setState(264); expression(5); } break; @@ -1871,11 +1852,11 @@ class PainlessParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(266); + setState(265); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(267); + setState(266); match(BOOLOR); - setState(268); + setState(267); expression(4); } break; @@ -1883,22 +1864,22 @@ class PainlessParser extends Parser { { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(269); + setState(268); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(270); + setState(269); match(COND); - setState(271); + setState(270); expression(0); - setState(272); + setState(271); match(COLON); - setState(273); + setState(272); expression(2); } break; } } } - setState(279); + setState(278); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -1946,40 +1927,40 @@ class PainlessParser extends Parser { ExtstartContext _localctx = new ExtstartContext(_ctx, getState()); enterRule(_localctx, 28, RULE_extstart); try { - setState(285); + setState(284); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(280); + setState(279); extprec(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(281); + setState(280); extcast(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(282); + setState(281); extvar(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(283); + setState(282); extnew(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(284); + setState(283); extstring(); } break; @@ -2037,54 +2018,54 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(287); + setState(286); match(LP); - setState(293); + setState(292); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(288); + setState(287); extprec(); } break; case 2: { - setState(289); + setState(288); extcast(); } break; case 3: { - setState(290); + setState(289); extvar(); } break; case 4: { - setState(291); + setState(290); extnew(); } break; case 5: { - setState(292); + setState(291); extstring(); } break; } - setState(295); + setState(294); match(RP); - setState(298); + setState(297); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(296); + setState(295); extdot(); } break; case 2: { - setState(297); + setState(296); extbrace(); } break; @@ -2140,41 +2121,41 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(300); + setState(299); match(LP); - setState(301); + setState(300); decltype(); - setState(302); + setState(301); match(RP); - setState(308); + setState(307); switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(303); + setState(302); extprec(); } break; case 2: { - 
setState(304); + setState(303); extcast(); } break; case 3: { - setState(305); + setState(304); extvar(); } break; case 4: { - setState(306); + setState(305); extnew(); } break; case 5: { - setState(307); + setState(306); extstring(); } break; @@ -2221,23 +2202,23 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(310); + setState(309); match(LBRACE); - setState(311); + setState(310); expression(0); - setState(312); + setState(311); match(RBRACE); - setState(315); + setState(314); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(313); + setState(312); extdot(); } break; case 2: { - setState(314); + setState(313); extbrace(); } break; @@ -2280,19 +2261,19 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(317); + setState(316); match(DOT); - setState(320); + setState(319); switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(318); + setState(317); extcall(); } break; case 2: { - setState(319); + setState(318); extfield(); } break; @@ -2338,21 +2319,21 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(322); + setState(321); match(EXTID); - setState(323); + setState(322); arguments(); - setState(326); + setState(325); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(324); + setState(323); extdot(); } break; case 2: { - setState(325); + setState(324); extbrace(); } break; @@ -2397,19 +2378,19 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(327); identifier(); - setState(331); + setState(330); switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(329); + setState(328); extdot(); } break; case 2: { - setState(330); + setState(329); extbrace(); } break; @@ -2454,24 +2435,24 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(333); + setState(332); _la = _input.LA(1); if ( !(_la==EXTINTEGER || _la==EXTID) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(336); + setState(335); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: { - setState(334); + setState(333); extdot(); } break; case 2: { - setState(335); + setState(334); extbrace(); } break; @@ -2532,22 +2513,22 @@ class PainlessParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(338); + setState(337); match(NEW); - setState(339); + setState(338); identifier(); - setState(355); + setState(354); switch (_input.LA(1)) { case LP: { { - setState(340); + setState(339); arguments(); - setState(342); + setState(341); switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: { - setState(341); + setState(340); extdot(); } break; @@ -2558,7 +2539,7 @@ class PainlessParser extends Parser { case LBRACE: { { - setState(348); + setState(347); _errHandler.sync(this); _alt = 1; do { @@ -2566,11 +2547,11 @@ class PainlessParser extends Parser { case 1: { { - setState(344); + setState(343); match(LBRACE); - setState(345); + setState(344); expression(0); - setState(346); + setState(345); match(RBRACE); } } @@ -2578,15 +2559,15 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(350); + setState(349); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(353); + setState(352); switch ( 
getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - setState(352); + setState(351); extdot(); } break; @@ -2635,19 +2616,19 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(357); + setState(356); match(STRING); - setState(360); + setState(359); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(358); + setState(357); extdot(); } break; case 2: { - setState(359); + setState(358); extbrace(); } break; @@ -2697,34 +2678,34 @@ class PainlessParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(362); + setState(361); match(LP); - setState(371); + setState(370); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { { - setState(363); + setState(362); expression(0); - setState(368); + setState(367); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(364); + setState(363); match(COMMA); - setState(365); + setState(364); expression(0); } } - setState(370); + setState(369); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(373); + setState(372); match(RP); } } @@ -2761,7 +2742,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(375); + setState(374); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -2817,7 +2798,7 @@ class PainlessParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3L\u017c\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3K\u017b\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -2834,40 +2815,40 @@ class PainlessParser extends Parser { "\f\3\r\3\r\5\r\u00c7\n\r\3\16\3\16\3\16\3\16\7\16\u00cd\n\16\f\16\16\16"+ "\u00d0\13\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3"+ "\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3"+ - "\17\3\17\3\17\3\17\3\17\5\17\u00f0\n\17\3\17\3\17\3\17\3\17\3\17\3\17"+ + "\17\3\17\3\17\3\17\5\17\u00ef\n\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ "\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ "\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ - "\3\17\3\17\7\17\u0116\n\17\f\17\16\17\u0119\13\17\3\20\3\20\3\20\3\20"+ - "\3\20\5\20\u0120\n\20\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u0128\n\21\3"+ - "\21\3\21\3\21\5\21\u012d\n\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\5\22\u0137\n\22\3\23\3\23\3\23\3\23\3\23\5\23\u013e\n\23\3\24\3\24\3"+ - 
"\24\5\24\u0143\n\24\3\25\3\25\3\25\3\25\5\25\u0149\n\25\3\26\3\26\3\26"+ - "\5\26\u014e\n\26\3\27\3\27\3\27\5\27\u0153\n\27\3\30\3\30\3\30\3\30\5"+ - "\30\u0159\n\30\3\30\3\30\3\30\3\30\6\30\u015f\n\30\r\30\16\30\u0160\3"+ - "\30\5\30\u0164\n\30\5\30\u0166\n\30\3\31\3\31\3\31\5\31\u016b\n\31\3\32"+ - "\3\32\3\32\3\32\7\32\u0171\n\32\f\32\16\32\u0174\13\32\5\32\u0176\n\32"+ - "\3\32\3\32\3\33\3\33\3\33\2\3\34\34\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+ - "\36 \"$&(*,.\60\62\64\2\f\4\2\32\33\37 \3\2\65@\3\2AD\3\2\34\36\3\2\37"+ - " \3\2!#\3\2$\'\3\2(+\3\2KL\3\2\63\64\u01b9\2\67\3\2\2\2\4\u008a\3\2\2"+ - "\2\6\u0095\3\2\2\2\b\u0099\3\2\2\2\n\u009b\3\2\2\2\f\u00a0\3\2\2\2\16"+ - "\u00a2\3\2\2\2\20\u00a4\3\2\2\2\22\u00ad\3\2\2\2\24\u00b5\3\2\2\2\26\u00ba"+ - "\3\2\2\2\30\u00c4\3\2\2\2\32\u00c8\3\2\2\2\34\u00ef\3\2\2\2\36\u011f\3"+ - "\2\2\2 \u0121\3\2\2\2\"\u012e\3\2\2\2$\u0138\3\2\2\2&\u013f\3\2\2\2(\u0144"+ - "\3\2\2\2*\u014a\3\2\2\2,\u014f\3\2\2\2.\u0154\3\2\2\2\60\u0167\3\2\2\2"+ - "\62\u016c\3\2\2\2\64\u0179\3\2\2\2\668\5\4\3\2\67\66\3\2\2\289\3\2\2\2"+ - "9\67\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7\2\2\3<\3\3\2\2\2=>\7\16\2\2>?\7\t"+ - "\2\2?@\5\34\17\2@A\7\n\2\2AD\5\6\4\2BC\7\17\2\2CE\5\6\4\2DB\3\2\2\2DE"+ - "\3\2\2\2E\u008b\3\2\2\2FG\7\20\2\2GH\7\t\2\2HI\5\34\17\2IL\7\n\2\2JM\5"+ - "\6\4\2KM\5\b\5\2LJ\3\2\2\2LK\3\2\2\2M\u008b\3\2\2\2NO\7\21\2\2OP\5\6\4"+ - "\2PQ\7\20\2\2QR\7\t\2\2RS\5\34\17\2SU\7\n\2\2TV\7\r\2\2UT\3\2\2\2UV\3"+ - "\2\2\2V\u008b\3\2\2\2WX\7\22\2\2XZ\7\t\2\2Y[\5\f\7\2ZY\3\2\2\2Z[\3\2\2"+ - "\2[\\\3\2\2\2\\^\7\r\2\2]_\5\34\17\2^]\3\2\2\2^_\3\2\2\2_`\3\2\2\2`b\7"+ - "\r\2\2ac\5\16\b\2ba\3\2\2\2bc\3\2\2\2cd\3\2\2\2dg\7\n\2\2eh\5\6\4\2fh"+ - "\5\b\5\2ge\3\2\2\2gf\3\2\2\2h\u008b\3\2\2\2ik\5\20\t\2jl\7\r\2\2kj\3\2"+ - "\2\2kl\3\2\2\2l\u008b\3\2\2\2mo\7\23\2\2np\7\r\2\2on\3\2\2\2op\3\2\2\2"+ - "p\u008b\3\2\2\2qs\7\24\2\2rt\7\r\2\2sr\3\2\2\2st\3\2\2\2t\u008b\3\2\2"+ - "\2uv\7\25\2\2vx\5\34\17\2wy\7\r\2\2xw\3\2\2\2xy\3\2\2\2y\u008b\3\2\2\2"+ - "z{\7\27\2\2{}\5\6\4\2|~\5\26\f\2}|\3\2\2\2~\177\3\2\2\2\177}\3\2\2\2\177"+ + "\3\17\7\17\u0115\n\17\f\17\16\17\u0118\13\17\3\20\3\20\3\20\3\20\3\20"+ + "\5\20\u011f\n\20\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u0127\n\21\3\21\3"+ + "\21\3\21\5\21\u012c\n\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\5\22"+ + "\u0136\n\22\3\23\3\23\3\23\3\23\3\23\5\23\u013d\n\23\3\24\3\24\3\24\5"+ + "\24\u0142\n\24\3\25\3\25\3\25\3\25\5\25\u0148\n\25\3\26\3\26\3\26\5\26"+ + "\u014d\n\26\3\27\3\27\3\27\5\27\u0152\n\27\3\30\3\30\3\30\3\30\5\30\u0158"+ + "\n\30\3\30\3\30\3\30\3\30\6\30\u015e\n\30\r\30\16\30\u015f\3\30\5\30\u0163"+ + "\n\30\5\30\u0165\n\30\3\31\3\31\3\31\5\31\u016a\n\31\3\32\3\32\3\32\3"+ + "\32\7\32\u0170\n\32\f\32\16\32\u0173\13\32\5\32\u0175\n\32\3\32\3\32\3"+ + "\33\3\33\3\33\2\3\34\34\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*"+ + ",.\60\62\64\2\f\4\2\32\33\37 \3\2\65@\3\2AD\3\2\34\36\3\2\37 \3\2!#\3"+ + "\2$\'\3\2(+\3\2JK\3\2\63\64\u01b7\2\67\3\2\2\2\4\u008a\3\2\2\2\6\u0095"+ + "\3\2\2\2\b\u0099\3\2\2\2\n\u009b\3\2\2\2\f\u00a0\3\2\2\2\16\u00a2\3\2"+ + "\2\2\20\u00a4\3\2\2\2\22\u00ad\3\2\2\2\24\u00b5\3\2\2\2\26\u00ba\3\2\2"+ + "\2\30\u00c4\3\2\2\2\32\u00c8\3\2\2\2\34\u00ee\3\2\2\2\36\u011e\3\2\2\2"+ + " \u0120\3\2\2\2\"\u012d\3\2\2\2$\u0137\3\2\2\2&\u013e\3\2\2\2(\u0143\3"+ + "\2\2\2*\u0149\3\2\2\2,\u014e\3\2\2\2.\u0153\3\2\2\2\60\u0166\3\2\2\2\62"+ + "\u016b\3\2\2\2\64\u0178\3\2\2\2\668\5\4\3\2\67\66\3\2\2\289\3\2\2\29\67"+ + "\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7\2\2\3<\3\3\2\2\2=>\7\16\2\2>?\7\t\2\2"+ + 
"?@\5\34\17\2@A\7\n\2\2AD\5\6\4\2BC\7\17\2\2CE\5\6\4\2DB\3\2\2\2DE\3\2"+ + "\2\2E\u008b\3\2\2\2FG\7\20\2\2GH\7\t\2\2HI\5\34\17\2IL\7\n\2\2JM\5\6\4"+ + "\2KM\5\b\5\2LJ\3\2\2\2LK\3\2\2\2M\u008b\3\2\2\2NO\7\21\2\2OP\5\6\4\2P"+ + "Q\7\20\2\2QR\7\t\2\2RS\5\34\17\2SU\7\n\2\2TV\7\r\2\2UT\3\2\2\2UV\3\2\2"+ + "\2V\u008b\3\2\2\2WX\7\22\2\2XZ\7\t\2\2Y[\5\f\7\2ZY\3\2\2\2Z[\3\2\2\2["+ + "\\\3\2\2\2\\^\7\r\2\2]_\5\34\17\2^]\3\2\2\2^_\3\2\2\2_`\3\2\2\2`b\7\r"+ + "\2\2ac\5\16\b\2ba\3\2\2\2bc\3\2\2\2cd\3\2\2\2dg\7\n\2\2eh\5\6\4\2fh\5"+ + "\b\5\2ge\3\2\2\2gf\3\2\2\2h\u008b\3\2\2\2ik\5\20\t\2jl\7\r\2\2kj\3\2\2"+ + "\2kl\3\2\2\2l\u008b\3\2\2\2mo\7\23\2\2np\7\r\2\2on\3\2\2\2op\3\2\2\2p"+ + "\u008b\3\2\2\2qs\7\24\2\2rt\7\r\2\2sr\3\2\2\2st\3\2\2\2t\u008b\3\2\2\2"+ + "uv\7\25\2\2vx\5\34\17\2wy\7\r\2\2xw\3\2\2\2xy\3\2\2\2y\u008b\3\2\2\2z"+ + "{\7\27\2\2{}\5\6\4\2|~\5\26\f\2}|\3\2\2\2~\177\3\2\2\2\177}\3\2\2\2\177"+ "\u0080\3\2\2\2\u0080\u008b\3\2\2\2\u0081\u0082\7\31\2\2\u0082\u0084\5"+ "\34\17\2\u0083\u0085\7\r\2\2\u0084\u0083\3\2\2\2\u0084\u0085\3\2\2\2\u0085"+ "\u008b\3\2\2\2\u0086\u0088\5\34\17\2\u0087\u0089\7\r\2\2\u0088\u0087\3"+ @@ -2892,77 +2873,77 @@ class PainlessParser extends Parser { "\2\u00b9\25\3\2\2\2\u00ba\u00bb\7\30\2\2\u00bb\u00bc\7\t\2\2\u00bc\u00bd"+ "\5\30\r\2\u00bd\u00be\5\30\r\2\u00be\u00bf\3\2\2\2\u00bf\u00c2\7\n\2\2"+ "\u00c0\u00c3\5\6\4\2\u00c1\u00c3\5\n\6\2\u00c2\u00c0\3\2\2\2\u00c2\u00c1"+ - "\3\2\2\2\u00c3\27\3\2\2\2\u00c4\u00c6\7J\2\2\u00c5\u00c7\5\32\16\2\u00c6"+ + "\3\2\2\2\u00c3\27\3\2\2\2\u00c4\u00c6\7I\2\2\u00c5\u00c7\5\32\16\2\u00c6"+ "\u00c5\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\31\3\2\2\2\u00c8\u00c9\7$\2\2"+ "\u00c9\u00ce\5\30\r\2\u00ca\u00cb\7\f\2\2\u00cb\u00cd\5\30\r\2\u00cc\u00ca"+ "\3\2\2\2\u00cd\u00d0\3\2\2\2\u00ce\u00cc\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf"+ "\u00d1\3\2\2\2\u00d0\u00ce\3\2\2\2\u00d1\u00d2\7&\2\2\u00d2\33\3\2\2\2"+ - "\u00d3\u00d4\b\17\1\2\u00d4\u00d5\t\2\2\2\u00d5\u00f0\5\34\17\20\u00d6"+ + "\u00d3\u00d4\b\17\1\2\u00d4\u00d5\t\2\2\2\u00d5\u00ef\5\34\17\20\u00d6"+ "\u00d7\7\t\2\2\u00d7\u00d8\5\22\n\2\u00d8\u00d9\7\n\2\2\u00d9\u00da\5"+ - "\34\17\17\u00da\u00f0\3\2\2\2\u00db\u00dc\5\36\20\2\u00dc\u00dd\t\3\2"+ - "\2\u00dd\u00de\5\34\17\3\u00de\u00f0\3\2\2\2\u00df\u00e0\7\t\2\2\u00e0"+ - "\u00e1\5\34\17\2\u00e1\u00e2\7\n\2\2\u00e2\u00f0\3\2\2\2\u00e3\u00f0\t"+ - "\4\2\2\u00e4\u00f0\7F\2\2\u00e5\u00f0\7G\2\2\u00e6\u00f0\7H\2\2\u00e7"+ - "\u00f0\7I\2\2\u00e8\u00e9\5\36\20\2\u00e9\u00ea\5\64\33\2\u00ea\u00f0"+ - "\3\2\2\2\u00eb\u00ec\5\64\33\2\u00ec\u00ed\5\36\20\2\u00ed\u00f0\3\2\2"+ - "\2\u00ee\u00f0\5\36\20\2\u00ef\u00d3\3\2\2\2\u00ef\u00d6\3\2\2\2\u00ef"+ - "\u00db\3\2\2\2\u00ef\u00df\3\2\2\2\u00ef\u00e3\3\2\2\2\u00ef\u00e4\3\2"+ - "\2\2\u00ef\u00e5\3\2\2\2\u00ef\u00e6\3\2\2\2\u00ef\u00e7\3\2\2\2\u00ef"+ - "\u00e8\3\2\2\2\u00ef\u00eb\3\2\2\2\u00ef\u00ee\3\2\2\2\u00f0\u0117\3\2"+ - "\2\2\u00f1\u00f2\f\16\2\2\u00f2\u00f3\t\5\2\2\u00f3\u0116\5\34\17\17\u00f4"+ - "\u00f5\f\r\2\2\u00f5\u00f6\t\6\2\2\u00f6\u0116\5\34\17\16\u00f7\u00f8"+ - "\f\f\2\2\u00f8\u00f9\t\7\2\2\u00f9\u0116\5\34\17\r\u00fa\u00fb\f\13\2"+ - "\2\u00fb\u00fc\t\b\2\2\u00fc\u0116\5\34\17\f\u00fd\u00fe\f\n\2\2\u00fe"+ - "\u00ff\t\t\2\2\u00ff\u0116\5\34\17\13\u0100\u0101\f\t\2\2\u0101\u0102"+ - "\7,\2\2\u0102\u0116\5\34\17\n\u0103\u0104\f\b\2\2\u0104\u0105\7-\2\2\u0105"+ - "\u0116\5\34\17\t\u0106\u0107\f\7\2\2\u0107\u0108\7.\2\2\u0108\u0116\5"+ - "\34\17\b\u0109\u010a\f\6\2\2\u010a\u010b\7/\2\2\u010b\u0116\5\34\17\7"+ - "\u010c\u010d\f\5\2\2\u010d\u010e\7\60\2\2\u010e\u0116\5\34\17\6\u010f"+ - 
"\u0110\f\4\2\2\u0110\u0111\7\61\2\2\u0111\u0112\5\34\17\2\u0112\u0113"+ - "\7\62\2\2\u0113\u0114\5\34\17\4\u0114\u0116\3\2\2\2\u0115\u00f1\3\2\2"+ - "\2\u0115\u00f4\3\2\2\2\u0115\u00f7\3\2\2\2\u0115\u00fa\3\2\2\2\u0115\u00fd"+ - "\3\2\2\2\u0115\u0100\3\2\2\2\u0115\u0103\3\2\2\2\u0115\u0106\3\2\2\2\u0115"+ - "\u0109\3\2\2\2\u0115\u010c\3\2\2\2\u0115\u010f\3\2\2\2\u0116\u0119\3\2"+ - "\2\2\u0117\u0115\3\2\2\2\u0117\u0118\3\2\2\2\u0118\35\3\2\2\2\u0119\u0117"+ - "\3\2\2\2\u011a\u0120\5 \21\2\u011b\u0120\5\"\22\2\u011c\u0120\5*\26\2"+ - "\u011d\u0120\5.\30\2\u011e\u0120\5\60\31\2\u011f\u011a\3\2\2\2\u011f\u011b"+ - "\3\2\2\2\u011f\u011c\3\2\2\2\u011f\u011d\3\2\2\2\u011f\u011e\3\2\2\2\u0120"+ - "\37\3\2\2\2\u0121\u0127\7\t\2\2\u0122\u0128\5 \21\2\u0123\u0128\5\"\22"+ - "\2\u0124\u0128\5*\26\2\u0125\u0128\5.\30\2\u0126\u0128\5\60\31\2\u0127"+ - "\u0122\3\2\2\2\u0127\u0123\3\2\2\2\u0127\u0124\3\2\2\2\u0127\u0125\3\2"+ - "\2\2\u0127\u0126\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012c\7\n\2\2\u012a"+ - "\u012d\5&\24\2\u012b\u012d\5$\23\2\u012c\u012a\3\2\2\2\u012c\u012b\3\2"+ - "\2\2\u012c\u012d\3\2\2\2\u012d!\3\2\2\2\u012e\u012f\7\t\2\2\u012f\u0130"+ - "\5\22\n\2\u0130\u0136\7\n\2\2\u0131\u0137\5 \21\2\u0132\u0137\5\"\22\2"+ - "\u0133\u0137\5*\26\2\u0134\u0137\5.\30\2\u0135\u0137\5\60\31\2\u0136\u0131"+ - "\3\2\2\2\u0136\u0132\3\2\2\2\u0136\u0133\3\2\2\2\u0136\u0134\3\2\2\2\u0136"+ - "\u0135\3\2\2\2\u0137#\3\2\2\2\u0138\u0139\7\7\2\2\u0139\u013a\5\34\17"+ - "\2\u013a\u013d\7\b\2\2\u013b\u013e\5&\24\2\u013c\u013e\5$\23\2\u013d\u013b"+ - "\3\2\2\2\u013d\u013c\3\2\2\2\u013d\u013e\3\2\2\2\u013e%\3\2\2\2\u013f"+ - "\u0142\7\13\2\2\u0140\u0143\5(\25\2\u0141\u0143\5,\27\2\u0142\u0140\3"+ - "\2\2\2\u0142\u0141\3\2\2\2\u0143\'\3\2\2\2\u0144\u0145\7L\2\2\u0145\u0148"+ - "\5\62\32\2\u0146\u0149\5&\24\2\u0147\u0149\5$\23\2\u0148\u0146\3\2\2\2"+ - "\u0148\u0147\3\2\2\2\u0148\u0149\3\2\2\2\u0149)\3\2\2\2\u014a\u014d\5"+ - "\30\r\2\u014b\u014e\5&\24\2\u014c\u014e\5$\23\2\u014d\u014b\3\2\2\2\u014d"+ - "\u014c\3\2\2\2\u014d\u014e\3\2\2\2\u014e+\3\2\2\2\u014f\u0152\t\n\2\2"+ - "\u0150\u0153\5&\24\2\u0151\u0153\5$\23\2\u0152\u0150\3\2\2\2\u0152\u0151"+ - "\3\2\2\2\u0152\u0153\3\2\2\2\u0153-\3\2\2\2\u0154\u0155\7\26\2\2\u0155"+ - "\u0165\5\30\r\2\u0156\u0158\5\62\32\2\u0157\u0159\5&\24\2\u0158\u0157"+ - "\3\2\2\2\u0158\u0159\3\2\2\2\u0159\u0166\3\2\2\2\u015a\u015b\7\7\2\2\u015b"+ - "\u015c\5\34\17\2\u015c\u015d\7\b\2\2\u015d\u015f\3\2\2\2\u015e\u015a\3"+ - "\2\2\2\u015f\u0160\3\2\2\2\u0160\u015e\3\2\2\2\u0160\u0161\3\2\2\2\u0161"+ - "\u0163\3\2\2\2\u0162\u0164\5&\24\2\u0163\u0162\3\2\2\2\u0163\u0164\3\2"+ - "\2\2\u0164\u0166\3\2\2\2\u0165\u0156\3\2\2\2\u0165\u015e\3\2\2\2\u0166"+ - "/\3\2\2\2\u0167\u016a\7E\2\2\u0168\u016b\5&\24\2\u0169\u016b\5$\23\2\u016a"+ - "\u0168\3\2\2\2\u016a\u0169\3\2\2\2\u016a\u016b\3\2\2\2\u016b\61\3\2\2"+ - "\2\u016c\u0175\7\t\2\2\u016d\u0172\5\34\17\2\u016e\u016f\7\f\2\2\u016f"+ - "\u0171\5\34\17\2\u0170\u016e\3\2\2\2\u0171\u0174\3\2\2\2\u0172\u0170\3"+ - "\2\2\2\u0172\u0173\3\2\2\2\u0173\u0176\3\2\2\2\u0174\u0172\3\2\2\2\u0175"+ - "\u016d\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0177\3\2\2\2\u0177\u0178\7\n"+ - "\2\2\u0178\63\3\2\2\2\u0179\u017a\t\13\2\2\u017a\65\3\2\2\2/9DLUZ^bgk"+ - "osx\177\u0084\u0088\u008a\u0090\u0095\u0099\u00a0\u00aa\u00b2\u00b8\u00c2"+ - "\u00c6\u00ce\u00ef\u0115\u0117\u011f\u0127\u012c\u0136\u013d\u0142\u0148"+ - "\u014d\u0152\u0158\u0160\u0163\u0165\u016a\u0172\u0175"; + "\34\17\17\u00da\u00ef\3\2\2\2\u00db\u00dc\5\36\20\2\u00dc\u00dd\t\3\2"+ + 
"\2\u00dd\u00de\5\34\17\3\u00de\u00ef\3\2\2\2\u00df\u00e0\7\t\2\2\u00e0"+ + "\u00e1\5\34\17\2\u00e1\u00e2\7\n\2\2\u00e2\u00ef\3\2\2\2\u00e3\u00ef\t"+ + "\4\2\2\u00e4\u00ef\7F\2\2\u00e5\u00ef\7G\2\2\u00e6\u00ef\7H\2\2\u00e7"+ + "\u00e8\5\36\20\2\u00e8\u00e9\5\64\33\2\u00e9\u00ef\3\2\2\2\u00ea\u00eb"+ + "\5\64\33\2\u00eb\u00ec\5\36\20\2\u00ec\u00ef\3\2\2\2\u00ed\u00ef\5\36"+ + "\20\2\u00ee\u00d3\3\2\2\2\u00ee\u00d6\3\2\2\2\u00ee\u00db\3\2\2\2\u00ee"+ + "\u00df\3\2\2\2\u00ee\u00e3\3\2\2\2\u00ee\u00e4\3\2\2\2\u00ee\u00e5\3\2"+ + "\2\2\u00ee\u00e6\3\2\2\2\u00ee\u00e7\3\2\2\2\u00ee\u00ea\3\2\2\2\u00ee"+ + "\u00ed\3\2\2\2\u00ef\u0116\3\2\2\2\u00f0\u00f1\f\16\2\2\u00f1\u00f2\t"+ + "\5\2\2\u00f2\u0115\5\34\17\17\u00f3\u00f4\f\r\2\2\u00f4\u00f5\t\6\2\2"+ + "\u00f5\u0115\5\34\17\16\u00f6\u00f7\f\f\2\2\u00f7\u00f8\t\7\2\2\u00f8"+ + "\u0115\5\34\17\r\u00f9\u00fa\f\13\2\2\u00fa\u00fb\t\b\2\2\u00fb\u0115"+ + "\5\34\17\f\u00fc\u00fd\f\n\2\2\u00fd\u00fe\t\t\2\2\u00fe\u0115\5\34\17"+ + "\13\u00ff\u0100\f\t\2\2\u0100\u0101\7,\2\2\u0101\u0115\5\34\17\n\u0102"+ + "\u0103\f\b\2\2\u0103\u0104\7-\2\2\u0104\u0115\5\34\17\t\u0105\u0106\f"+ + "\7\2\2\u0106\u0107\7.\2\2\u0107\u0115\5\34\17\b\u0108\u0109\f\6\2\2\u0109"+ + "\u010a\7/\2\2\u010a\u0115\5\34\17\7\u010b\u010c\f\5\2\2\u010c\u010d\7"+ + "\60\2\2\u010d\u0115\5\34\17\6\u010e\u010f\f\4\2\2\u010f\u0110\7\61\2\2"+ + "\u0110\u0111\5\34\17\2\u0111\u0112\7\62\2\2\u0112\u0113\5\34\17\4\u0113"+ + "\u0115\3\2\2\2\u0114\u00f0\3\2\2\2\u0114\u00f3\3\2\2\2\u0114\u00f6\3\2"+ + "\2\2\u0114\u00f9\3\2\2\2\u0114\u00fc\3\2\2\2\u0114\u00ff\3\2\2\2\u0114"+ + "\u0102\3\2\2\2\u0114\u0105\3\2\2\2\u0114\u0108\3\2\2\2\u0114\u010b\3\2"+ + "\2\2\u0114\u010e\3\2\2\2\u0115\u0118\3\2\2\2\u0116\u0114\3\2\2\2\u0116"+ + "\u0117\3\2\2\2\u0117\35\3\2\2\2\u0118\u0116\3\2\2\2\u0119\u011f\5 \21"+ + "\2\u011a\u011f\5\"\22\2\u011b\u011f\5*\26\2\u011c\u011f\5.\30\2\u011d"+ + "\u011f\5\60\31\2\u011e\u0119\3\2\2\2\u011e\u011a\3\2\2\2\u011e\u011b\3"+ + "\2\2\2\u011e\u011c\3\2\2\2\u011e\u011d\3\2\2\2\u011f\37\3\2\2\2\u0120"+ + "\u0126\7\t\2\2\u0121\u0127\5 \21\2\u0122\u0127\5\"\22\2\u0123\u0127\5"+ + "*\26\2\u0124\u0127\5.\30\2\u0125\u0127\5\60\31\2\u0126\u0121\3\2\2\2\u0126"+ + "\u0122\3\2\2\2\u0126\u0123\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0125\3\2"+ + "\2\2\u0127\u0128\3\2\2\2\u0128\u012b\7\n\2\2\u0129\u012c\5&\24\2\u012a"+ + "\u012c\5$\23\2\u012b\u0129\3\2\2\2\u012b\u012a\3\2\2\2\u012b\u012c\3\2"+ + "\2\2\u012c!\3\2\2\2\u012d\u012e\7\t\2\2\u012e\u012f\5\22\n\2\u012f\u0135"+ + "\7\n\2\2\u0130\u0136\5 \21\2\u0131\u0136\5\"\22\2\u0132\u0136\5*\26\2"+ + "\u0133\u0136\5.\30\2\u0134\u0136\5\60\31\2\u0135\u0130\3\2\2\2\u0135\u0131"+ + "\3\2\2\2\u0135\u0132\3\2\2\2\u0135\u0133\3\2\2\2\u0135\u0134\3\2\2\2\u0136"+ + "#\3\2\2\2\u0137\u0138\7\7\2\2\u0138\u0139\5\34\17\2\u0139\u013c\7\b\2"+ + "\2\u013a\u013d\5&\24\2\u013b\u013d\5$\23\2\u013c\u013a\3\2\2\2\u013c\u013b"+ + "\3\2\2\2\u013c\u013d\3\2\2\2\u013d%\3\2\2\2\u013e\u0141\7\13\2\2\u013f"+ + "\u0142\5(\25\2\u0140\u0142\5,\27\2\u0141\u013f\3\2\2\2\u0141\u0140\3\2"+ + "\2\2\u0142\'\3\2\2\2\u0143\u0144\7K\2\2\u0144\u0147\5\62\32\2\u0145\u0148"+ + "\5&\24\2\u0146\u0148\5$\23\2\u0147\u0145\3\2\2\2\u0147\u0146\3\2\2\2\u0147"+ + "\u0148\3\2\2\2\u0148)\3\2\2\2\u0149\u014c\5\30\r\2\u014a\u014d\5&\24\2"+ + "\u014b\u014d\5$\23\2\u014c\u014a\3\2\2\2\u014c\u014b\3\2\2\2\u014c\u014d"+ + "\3\2\2\2\u014d+\3\2\2\2\u014e\u0151\t\n\2\2\u014f\u0152\5&\24\2\u0150"+ + "\u0152\5$\23\2\u0151\u014f\3\2\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2"+ + 
"\2\2\u0152-\3\2\2\2\u0153\u0154\7\26\2\2\u0154\u0164\5\30\r\2\u0155\u0157"+ + "\5\62\32\2\u0156\u0158\5&\24\2\u0157\u0156\3\2\2\2\u0157\u0158\3\2\2\2"+ + "\u0158\u0165\3\2\2\2\u0159\u015a\7\7\2\2\u015a\u015b\5\34\17\2\u015b\u015c"+ + "\7\b\2\2\u015c\u015e\3\2\2\2\u015d\u0159\3\2\2\2\u015e\u015f\3\2\2\2\u015f"+ + "\u015d\3\2\2\2\u015f\u0160\3\2\2\2\u0160\u0162\3\2\2\2\u0161\u0163\5&"+ + "\24\2\u0162\u0161\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u0165\3\2\2\2\u0164"+ + "\u0155\3\2\2\2\u0164\u015d\3\2\2\2\u0165/\3\2\2\2\u0166\u0169\7E\2\2\u0167"+ + "\u016a\5&\24\2\u0168\u016a\5$\23\2\u0169\u0167\3\2\2\2\u0169\u0168\3\2"+ + "\2\2\u0169\u016a\3\2\2\2\u016a\61\3\2\2\2\u016b\u0174\7\t\2\2\u016c\u0171"+ + "\5\34\17\2\u016d\u016e\7\f\2\2\u016e\u0170\5\34\17\2\u016f\u016d\3\2\2"+ + "\2\u0170\u0173\3\2\2\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0175"+ + "\3\2\2\2\u0173\u0171\3\2\2\2\u0174\u016c\3\2\2\2\u0174\u0175\3\2\2\2\u0175"+ + "\u0176\3\2\2\2\u0176\u0177\7\n\2\2\u0177\63\3\2\2\2\u0178\u0179\t\13\2"+ + "\2\u0179\65\3\2\2\2/9DLUZ^bgkosx\177\u0084\u0088\u008a\u0090\u0095\u0099"+ + "\u00a0\u00aa\u00b2\u00b8\u00c2\u00c6\u00ce\u00ee\u0114\u0116\u011e\u0126"+ + "\u012b\u0135\u013c\u0141\u0147\u014c\u0151\u0157\u015f\u0162\u0164\u0169"+ + "\u0171\u0174"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java index f1cc222edd9..824b83fe952 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java @@ -277,13 +277,6 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.
*/ @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.
- */ - @Override public T visitChar(PainlessParser.CharContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java index 3873d6f1e08..915328a3f05 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java @@ -265,13 +265,6 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitBinary(PainlessParser.BinaryContext ctx); - /** - * Visit a parse tree produced by the {@code char} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitChar(PainlessParser.CharContext ctx); /** * Visit a parse tree produced by the {@code true} * labeled alternative in {@link PainlessParser#expression}. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java index 45c507b42ba..ee5a5f8cd1c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java @@ -248,6 +248,10 @@ public class Utility { return (double)value; } + public static String charToString(final char value) { + return String.valueOf(value); + } + public static boolean CharacterToboolean(final Character value) { return value != 0; } @@ -304,6 +308,10 @@ public class Utility { return value == null ? null : (double)value; } + public static String CharacterToString(final Character value) { + return value == null ? null : value.toString(); + } + public static boolean intToboolean(final int value) { return value != 0; } @@ -448,6 +456,26 @@ public class Utility { return (char)value.doubleValue(); } + public static char StringTochar(final String value) { + if (value.length() != 1) { + throw new ClassCastException("Cannot cast [String] with length greater than one to [char]."); + } + + return value.charAt(0); + } + + public static Character StringToCharacter(final String value) { + if (value == null) { + return null; + } + + if (value.length() != 1) { + throw new ClassCastException("Cannot cast [String] with length greater than one to [Character]."); + } + + return value.charAt(0); + } + // although divide by zero is guaranteed, the special overflow case is not caught. // its not needed for remainder because it is not possible there. 
// see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2 diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java index 3cc6a2aa22b..4490bf119b4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.PainlessParser.BinaryContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.BreakContext; import org.elasticsearch.painless.PainlessParser.CastContext; -import org.elasticsearch.painless.PainlessParser.CharContext; import org.elasticsearch.painless.PainlessParser.CompContext; import org.elasticsearch.painless.PainlessParser.ConditionalContext; import org.elasticsearch.painless.PainlessParser.ContinueContext; @@ -354,13 +353,6 @@ class Writer extends PainlessParserBaseVisitor { return null; } - @Override - public Void visitChar(final CharContext ctx) { - expression.processChar(ctx); - - return null; - } - @Override public Void visitTrue(final TrueContext ctx) { expression.processTrue(ctx); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java index c850031efa0..7d10a275690 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterExpression.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.PainlessParser.AssignmentContext; import org.elasticsearch.painless.PainlessParser.BinaryContext; import org.elasticsearch.painless.PainlessParser.BoolContext; import org.elasticsearch.painless.PainlessParser.CastContext; -import org.elasticsearch.painless.PainlessParser.CharContext; import org.elasticsearch.painless.PainlessParser.CompContext; import org.elasticsearch.painless.PainlessParser.ConditionalContext; import org.elasticsearch.painless.PainlessParser.ExpressionContext; @@ -104,20 +103,6 @@ class WriterExpression { utility.checkWriteBranch(ctx); } - void processChar(final CharContext ctx) { - final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); - final Object postConst = charemd.postConst; - - if (postConst == null) { - utility.writeNumeric(ctx, (int)(char)charemd.preConst); - caster.checkWriteCast(charemd); - } else { - utility.writeConstant(ctx, postConst); - } - - utility.checkWriteBranch(ctx); - } - void processTrue(final TrueContext ctx) { final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); final Object postConst = trueemd.postConst; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index ee3607242ca..d2a24ef9533 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -38,7 +38,7 @@ public class BasicExpressionTests extends ScriptTestCase { } public void testReturnConstantChar() { - assertEquals('x', exec("return 'x';")); + assertEquals('x', exec("return (char)'x';")); } public void testConstantCharTruncation() { diff --git 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java index 607da9f0f32..8043d9da915 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java @@ -131,51 +131,51 @@ public class EqualsTests extends ScriptTestCase { } public void testBranchEquals() { - assertEquals(0, exec("Character a = 'a'; Character b = 'b'; if (a == b) return 1; else return 0;")); - assertEquals(1, exec("Character a = 'a'; Character b = 'a'; if (a == b) return 1; else return 0;")); + assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'b'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'a'; if (a == b) return 1; else return 0;")); assertEquals(0, exec("Integer a = new Integer(1); Integer b = 1; if (a === b) return 1; else return 0;")); - assertEquals(0, exec("Character a = 'a'; Character b = new Character('a'); if (a === b) return 1; else return 0;")); - assertEquals(1, exec("Character a = 'a'; Object b = a; if (a === b) return 1; else return 0;")); + assertEquals(0, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a === b) return 1; else return 0;")); + assertEquals(1, exec("Character a = (char)'a'; Object b = a; if (a === b) return 1; else return 0;")); assertEquals(1, exec("Integer a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = 1; Character b = 'a'; if (a === (Object)b) return 1; else return 0;")); + assertEquals(0, exec("Integer a = 1; Character b = (char)'a'; if (a === (Object)b) return 1; else return 0;")); } public void testBranchNotEquals() { - assertEquals(1, exec("Character a = 'a'; Character b = 'b'; if (a != b) return 1; else return 0;")); - assertEquals(0, exec("Character a = 'a'; Character b = 'a'; if (a != b) return 1; else return 0;")); + assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'b'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'a'; if (a != b) return 1; else return 0;")); assertEquals(1, exec("Integer a = new Integer(1); Integer b = 1; if (a !== b) return 1; else return 0;")); - assertEquals(1, exec("Character a = 'a'; Character b = new Character('a'); if (a !== b) return 1; else return 0;")); - assertEquals(0, exec("Character a = 'a'; Object b = a; if (a !== b) return 1; else return 0;")); + assertEquals(1, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("Character a = (char)'a'; Object b = a; if (a !== b) return 1; else return 0;")); assertEquals(0, exec("Integer a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = 1; Character b = 'a'; if (a !== (Object)b) return 1; else return 0;")); + assertEquals(1, exec("Integer a = 1; Character b = (char)'a'; if (a !== (Object)b) return 1; else return 0;")); } public void testRightHandNull() { - assertEquals(false, exec("Character a = 'a'; return a == null;")); - assertEquals(false, exec("Character a = 'a'; return a === null;")); - assertEquals(true, exec("Character a = 'a'; return a != null;")); - assertEquals(true, exec("Character a = 'a'; return a !== null;")); + assertEquals(false, exec("Character a = (char)'a'; return a == 
null;")); + assertEquals(false, exec("Character a = (char)'a'; return a === null;")); + assertEquals(true, exec("Character a = (char)'a'; return a != null;")); + assertEquals(true, exec("Character a = (char)'a'; return a !== null;")); assertEquals(true, exec("Character a = null; return a == null;")); assertEquals(false, exec("Character a = null; return a != null;")); - assertEquals(false, exec("Character a = 'a'; Character b = null; return a == b;")); + assertEquals(false, exec("Character a = (char)'a'; Character b = null; return a == b;")); assertEquals(true, exec("Character a = null; Character b = null; return a === b;")); - assertEquals(true, exec("Character a = 'a'; Character b = null; return a != b;")); + assertEquals(true, exec("Character a = (char)'a'; Character b = null; return a != b;")); assertEquals(false, exec("Character a = null; Character b = null; return a !== b;")); assertEquals(false, exec("Integer x = null; double y = 2.0; return x == y;")); assertEquals(true, exec("Integer x = null; Short y = null; return x == y;")); } public void testLeftHandNull() { - assertEquals(false, exec("Character a = 'a'; return null == a;")); - assertEquals(false, exec("Character a = 'a'; return null === a;")); - assertEquals(true, exec("Character a = 'a'; return null != a;")); - assertEquals(true, exec("Character a = 'a'; return null !== a;")); + assertEquals(false, exec("Character a = (char)'a'; return null == a;")); + assertEquals(false, exec("Character a = (char)'a'; return null === a;")); + assertEquals(true, exec("Character a = (char)'a'; return null != a;")); + assertEquals(true, exec("Character a = (char)'a'; return null !== a;")); assertEquals(true, exec("Character a = null; return null == a;")); assertEquals(false, exec("Character a = null; return null != a;")); - assertEquals(false, exec("Character a = null; Character b = 'a'; return a == b;")); + assertEquals(false, exec("Character a = null; Character b = (char)'a'; return a == b;")); assertEquals(true, exec("Character a = null; Character b = null; return a == b;")); assertEquals(true, exec("Character a = null; Character b = null; return b === a;")); - assertEquals(true, exec("Character a = null; Character b = 'a'; return a != b;")); + assertEquals(true, exec("Character a = null; Character b = (char)'a'; return a != b;")); assertEquals(false, exec("Character a = null; Character b = null; return b != a;")); assertEquals(false, exec("Character a = null; Character b = null; return b !== a;")); assertEquals(false, exec("Integer x = null; double y = 2.0; return y == x;")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index 7d489332f92..f619d44ad81 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -41,6 +41,26 @@ public class StringTests extends ScriptTestCase { assertEquals("cat" + 2.0, exec("String s = \"cat\"; return s + 2.0;")); // String assertEquals("cat" + "cat", exec("String s = \"cat\"; return s + s;")); + + // boolean + assertEquals("cat" + true, exec("String s = 'cat'; return s + true;")); + // byte + assertEquals("cat" + (byte)3, exec("String s = 'cat'; return s + (byte)3;")); + // short + assertEquals("cat" + (short)3, exec("String s = 'cat'; return s + (short)3;")); + // char + assertEquals("cat" + 't', exec("String s = 'cat'; return s + 't';")); + assertEquals("cat" 
+ (char)40, exec("String s = 'cat'; return s + (char)40;")); + // int + assertEquals("cat" + 2, exec("String s = 'cat'; return s + 2;")); + // long + assertEquals("cat" + 2L, exec("String s = 'cat'; return s + 2L;")); + // float + assertEquals("cat" + 2F, exec("String s = 'cat'; return s + 2F;")); + // double + assertEquals("cat" + 2.0, exec("String s = 'cat'; return s + 2.0;")); + // String + assertEquals("cat" + "cat", exec("String s = 'cat'; return s + s;")); } public void testStringAPI() { @@ -71,5 +91,99 @@ public class StringTests extends ScriptTestCase { assertEquals("e", exec("return \"abcde\".substring(4, 5);")); assertEquals(97, ((char[])exec("return \"a\".toCharArray();"))[0]); assertEquals("a", exec("return \" a \".trim();")); + + assertEquals("", exec("return new String();")); + assertEquals('x', exec("String s = 'x'; return s.charAt(0);")); + assertEquals(120, exec("String s = 'x'; return s.codePointAt(0);")); + assertEquals(0, exec("String s = 'x'; return s.compareTo('x');")); + assertEquals("xx", exec("String s = 'x'; return s.concat('x');")); + assertEquals(true, exec("String s = 'xy'; return s.endsWith('y');")); + assertEquals(2, exec("String t = 'abcde'; return t.indexOf('cd', 1);")); + assertEquals(false, exec("String t = 'abcde'; return t.isEmpty();")); + assertEquals(5, exec("String t = 'abcde'; return t.length();")); + assertEquals("cdcde", exec("String t = 'abcde'; return t.replace('ab', 'cd');")); + assertEquals(false, exec("String s = 'xy'; return s.startsWith('y');")); + assertEquals("e", exec("String t = 'abcde'; return t.substring(4, 5);")); + assertEquals(97, ((char[])exec("String s = 'a'; return s.toCharArray();"))[0]); + assertEquals("a", exec("String s = ' a '; return s.trim();")); + assertEquals('x', exec("return 'x'.charAt(0);")); + assertEquals(120, exec("return 'x'.codePointAt(0);")); + assertEquals(0, exec("return 'x'.compareTo('x');")); + assertEquals("xx", exec("return 'x'.concat('x');")); + assertEquals(true, exec("return 'xy'.endsWith('y');")); + assertEquals(2, exec("return 'abcde'.indexOf('cd', 1);")); + assertEquals(false, exec("return 'abcde'.isEmpty();")); + assertEquals(5, exec("return 'abcde'.length();")); + assertEquals("cdcde", exec("return 'abcde'.replace('ab', 'cd');")); + assertEquals(false, exec("return 'xy'.startsWith('y');")); + assertEquals("e", exec("return 'abcde'.substring(4, 5);")); + assertEquals(97, ((char[])exec("return 'a'.toCharArray();"))[0]); + assertEquals("a", exec("return ' a '.trim();")); + } + + public void testStringAndCharacter() { + assertEquals('c', exec("return (char)\"c\"")); + assertEquals('c', exec("return (char)'c'")); + assertEquals("c", exec("return (String)(char)\"c\"")); + assertEquals("c", exec("return (String)(char)'c'")); + + assertEquals('c', exec("String s = \"c\" (char)s")); + assertEquals('c', exec("String s = 'c' (char)s")); + + try { + assertEquals("cc", exec("return (String)(char)\"cc\"")); + } catch (final IllegalArgumentException ise) { + ise.getMessage().contains("Cannot cast constant from [String] to [char]."); + } + + try { + assertEquals("cc", exec("return (String)(char)'cc'")); + } catch (final IllegalArgumentException ise) { + ise.getMessage().contains("Cannot cast constant from [String] to [char]."); + } + + try { + assertEquals('c', exec("String s = \"cc\" (char)s")); + } catch (final ClassCastException cce) { + cce.getMessage().contains("Cannot cast [String] with length greater than one to [char]."); + } + + try { + assertEquals('c', exec("String s = 'cc' (char)s")); + } catch (final 
ClassCastException cce) { + cce.getMessage().contains("Cannot cast [String] with length greater than one to [char]."); + } + + assertEquals('c', exec("return (Character)\"c\"")); + assertEquals('c', exec("return (Character)'c'")); + assertEquals("c", exec("return (String)(Character)\"c\"")); + assertEquals("c", exec("return (String)(Character)'c'")); + + assertEquals('c', exec("String s = \"c\" (Character)s")); + assertEquals('c', exec("String s = 'c' (Character)s")); + + try { + assertEquals("cc", exec("return (String)(Character)\"cc\"")); + } catch (final ClassCastException ise) { + ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."); + } + + try { + assertEquals("cc", exec("return (String)(Character)'cc'")); + } catch (final ClassCastException ise) { + ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."); + } + + try { + assertEquals('c', exec("String s = \"cc\" (Character)s")); + } catch (final ClassCastException cce) { + cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."); + } + + try { + assertEquals('c', exec("String s = 'cc' (Character)s")); + } catch (final ClassCastException cce) { + cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."); + } } } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index 66cff5df792..d53306b5d47 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -28,7 +28,7 @@ setup: script_fields: bar: script: - inline: "input.doc.foo.0 + input.x;" + inline: "input.doc['foo'].0 + input.x;" lang: painless params: x: "bbb" diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml index da28a1f4201..2dd1a6004ff 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml @@ -29,12 +29,12 @@ query: script: script: - inline: "input.doc.num1.0 > 1;" + inline: "input.doc['num1'].0 > 1;" lang: painless script_fields: sNum1: script: - inline: "input.doc.num1.0;" + inline: "input.doc['num1'].0;" lang: painless sort: num1: @@ -51,7 +51,7 @@ query: script: script: - inline: "input.doc.num1.0 > input.param1;" + inline: "input.doc['num1'].0 > input.param1;" lang: painless params: param1: 1 @@ -59,7 +59,7 @@ script_fields: sNum1: script: - inline: "return input.doc.num1.0;" + inline: "return input.doc['num1'].0;" lang: painless sort: num1: @@ -76,7 +76,7 @@ query: script: script: - inline: "input.doc.num1.0 > input.param1;" + inline: "input.doc['num1'].0 > input.param1;" lang: painless params: param1: -1 @@ -84,7 +84,7 @@ script_fields: sNum1: script: - inline: "input.doc.num1.0;" + inline: "input.doc['num1'].0;" lang: painless sort: num1: From 4b1c116461f91fd365eefc697fcaad6f27786321 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 29 Apr 2016 10:42:03 -0400 Subject: [PATCH 0084/1311] Generate and run tests from the docs Adds infrastructure so `gradle :docs:check` will extract tests from snippets in the documentation and execute the tests. 
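For example, a snippet like the following hypothetical doc fragment (the index, type, and body are invented for illustration; the marker syntax is described below) is extracted and executed as a REST test:

[source,js]
--------------------------------------------------
PUT test/tweet/1
{
    "msg": "some message"
}
--------------------------------------------------
// AUTOSENSE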
This is included in `gradle check` so it should happen on CI and during a normal build. By default each `// AUTOSENSE` snippet creates a unique REST test. These tests are executed in a random order and the cluster is wiped between each one. If multiple snippets chain together into a test you can annotate all snippets after the first with `// TEST[continued]` to have the generated tests for both snippets joined. Snippets marked as `// TESTRESPONSE` are checked against the response of the last action. See docs/README.asciidoc for lots more. Closes #12583. That issue is about catching bugs in the docs during build. This catches *some* bugs in the docs during build which is a good start. --- .../gradle/RestTestsFromSnippetsTask.groovy | 214 +++++++++++++ .../elasticsearch/gradle/SnippetsTask.groovy | 299 ++++++++++++++++++ .../index/mapper/MapperService.java | 2 +- docs/README.asciidoc | 45 ++- docs/build.gradle | 84 +++++ .../analyzers/pattern-analyzer.asciidoc | 23 +- docs/reference/cluster/tasks.asciidoc | 21 +- docs/reference/docs/index_.asciidoc | 99 ++++-- docs/reference/docs/reindex.asciidoc | 83 +++-- docs/reference/docs/update-by-query.asciidoc | 64 +++- .../index-modules/allocation/delayed.asciidoc | 7 +- .../allocation/filtering.asciidoc | 5 + .../allocation/prioritization.asciidoc | 1 + docs/reference/indices/analyze.asciidoc | 3 +- docs/reference/indices/flush.asciidoc | 32 +- docs/reference/indices/put-mapping.asciidoc | 22 +- docs/reference/indices/templates.asciidoc | 2 +- docs/reference/ingest.asciidoc | 7 +- docs/reference/ingest/ingest-node.asciidoc | 47 +-- docs/reference/mapping.asciidoc | 1 + .../mapping/dynamic-mapping.asciidoc | 5 +- .../mapping/dynamic/templates.asciidoc | 16 +- .../mapping/fields/all-field.asciidoc | 7 +- .../mapping/fields/field-names-field.asciidoc | 16 +- .../mapping/fields/parent-field.asciidoc | 16 +- .../mapping/fields/routing-field.asciidoc | 28 +- .../mapping/fields/timestamp-field.asciidoc | 1 + .../mapping/fields/type-field.asciidoc | 29 +- docs/reference/mapping/params.asciidoc | 7 - .../mapping/params/analyzer.asciidoc | 15 +- docs/reference/mapping/params/boost.asciidoc | 21 +- docs/reference/mapping/params/coerce.asciidoc | 13 +- .../reference/mapping/params/copy-to.asciidoc | 6 +- .../reference/mapping/params/dynamic.asciidoc | 21 +- .../mapping/params/fielddata.asciidoc | 13 +- .../mapping/params/ignore-malformed.asciidoc | 12 +- .../mapping/params/include-in-all.asciidoc | 2 +- .../mapping/params/multi-fields.asciidoc | 9 +- docs/reference/mapping/params/norms.asciidoc | 3 +- .../params/position-increment-gap.asciidoc | 10 +- .../mapping/params/properties.asciidoc | 3 +- .../mapping/params/search-analyzer.asciidoc | 4 +- docs/reference/mapping/params/store.asciidoc | 5 +- docs/reference/mapping/types/date.asciidoc | 3 +- docs/reference/mapping/types/nested.asciidoc | 3 +- .../migration/migrate_5_0/mapping.asciidoc | 3 +- .../cluster/allocation_filtering.asciidoc | 4 +- .../modules/cluster/disk_allocator.asciidoc | 3 +- docs/reference/modules/node.asciidoc | 2 +- .../modules/scripting/painless.asciidoc | 37 ++- .../modules/scripting/using.asciidoc | 27 +- docs/reference/query-dsl/bool-query.asciidoc | 60 ++-- .../query-dsl/function-score-query.asciidoc | 3 +- docs/reference/query-dsl/term-query.asciidoc | 11 +- docs/reference/setup/cluster_restart.asciidoc | 8 +- docs/reference/setup/rolling_upgrade.asciidoc | 8 +- .../smoketest/SmokeTestDocsIT.java | 53 ++++ settings.gradle | 1 + .../test/rest/RestTestExecutionContext.java | 2 +- 
.../org/elasticsearch/test/rest/Stash.java | 27 +- .../test/rest/client/RestClient.java | 16 + .../test/rest/json/JsonPath.java | 6 +- .../rest/parser/RestTestSectionParser.java | 28 +- .../parser/RestTestSuiteParseContext.java | 30 +- .../rest/section/ResponseBodyAssertion.java | 167 ++++++++++ 65 files changed, 1385 insertions(+), 440 deletions(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/SnippetsTask.groovy create mode 100644 docs/build.gradle create mode 100644 docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java create mode 100644 test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy new file mode 100644 index 00000000000..c4201a01f26 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle + +import org.elasticsearch.gradle.SnippetsTask.Snippet +import org.gradle.api.InvalidUserDataException +import org.gradle.api.tasks.Input +import org.gradle.api.tasks.OutputDirectory + +import java.nio.file.Files +import java.nio.file.Path +import java.util.regex.Matcher + +/** + * Generates REST tests for each snippet marked // TEST. + */ +public class RestTestsFromSnippetsTask extends SnippetsTask { + @Input + Map setups = new HashMap() + + /** + * Root directory of the tests being generated. To make rest tests happy + * we generate them in a testRoot() which is contained in this directory. + */ + @OutputDirectory + File testRoot = project.file('build/rest') + + public RestTestsFromSnippetsTask() { + project.afterEvaluate { + // Wait to set this so testRoot can be customized + project.sourceSets.test.output.dir(testRoot, builtBy: this) + } + TestBuilder builder = new TestBuilder() + doFirst { outputRoot().delete() } + perSnippet builder.&handleSnippet + doLast builder.&finishLastTest + } + + /** + * Root directory containing all the files generated by this task. It is + * contained withing testRoot. 
+ */ + File outputRoot() { + return new File(testRoot, '/rest-api-spec/test') + } + + private class TestBuilder { + private static final String SYNTAX = { + String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ + String pathAndQuery = /(?[^\n]+)/ + String badBody = /GET|PUT|POST|HEAD|OPTIONS|DELETE|#/ + String body = /(?(?:\n(?!$badBody)[^\n]+)+)/ + String nonComment = /$method\s+$pathAndQuery$body?/ + String comment = /(?#.+)/ + /(?:$comment|$nonComment)\n+/ + }() + + /** + * The file in which we saw the last snippet that made a test. + */ + Path lastDocsPath + + /** + * The file we're building. + */ + PrintWriter current + + /** + * Called each time a snippet is encountered. Tracks the snippets and + * calls buildTest to actually build the test. + */ + void handleSnippet(Snippet snippet) { + if (snippet.testSetup) { + setup(snippet) + return + } + if (snippet.testResponse) { + response(snippet) + return + } + if (snippet.test || snippet.autoSense) { + test(snippet) + return + } + // Must be an unmarked snippet.... + } + + private void test(Snippet test) { + setupCurrent(test) + + if (false == test.continued) { + current.println('---') + current.println("\"$test.start\":") + } + if (test.skipTest) { + current.println(" - skip:") + current.println(" features: always_skip") + current.println(" reason: $test.skipTest") + } + if (test.setup != null) { + String setup = setups[test.setup] + if (setup == null) { + throw new InvalidUserDataException("Couldn't find setup " + + "for $test") + } + current.println(setup) + } + + body(test) + } + + private void response(Snippet response) { + current.println(" - response_body: |") + response.contents.eachLine { current.println(" $it") } + } + + void emitDo(String method, String pathAndQuery, + String body, String catchPart) { + def (String path, String query) = pathAndQuery.tokenize('?') + current.println(" - do:") + if (catchPart != null) { + current.println(" catch: $catchPart") + } + current.println(" raw:") + current.println(" method: $method") + current.println(" path: \"$path\"") + if (query != null) { + for (String param: query.tokenize('&')) { + def (String name, String value) = param.tokenize('=') + current.println(" $name: \"$value\"") + } + } + if (body != null) { + // Throw out the leading newline we get from parsing the body + body = body.substring(1) + current.println(" body: |") + body.eachLine { current.println(" $it") } + } + } + + private void setup(Snippet setup) { + if (lastDocsPath == setup.path) { + throw new InvalidUserDataException("$setup: wasn't first") + } + setupCurrent(setup) + current.println('---') + current.println("setup:") + body(setup) + } + + private void body(Snippet snippet) { + parse("$snippet", snippet.contents, SYNTAX) { matcher, last -> + if (matcher.group("comment") != null) { + // Comment + return + } + String method = matcher.group("method") + String pathAndQuery = matcher.group("pathAndQuery") + String body = matcher.group("body") + String catchPart = last ? snippet.catchPart : null + if (pathAndQuery.startsWith('/')) { + // Why not do some light linting while we're here? 
+ throw new InvalidUserDataException( + "Path shouldn't start with a '/': $snippet\n" + + snippet.contents) + } + emitDo(method, pathAndQuery, body, catchPart) + } + } + + private PrintWriter setupCurrent(Snippet test) { + if (lastDocsPath == test.path) { + return + } + finishLastTest() + lastDocsPath = test.path + + // Make the destination file: + // Shift the path into the destination directory tree + Path dest = outputRoot().toPath().resolve(test.path) + // Replace the extension + String fileName = dest.getName(dest.nameCount - 1) + dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yaml')) + + // Now setup the writer + Files.createDirectories(dest.parent) + current = dest.newPrintWriter() + } + + void finishLastTest() { + if (current != null) { + current.close() + current = null + } + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/SnippetsTask.groovy new file mode 100644 index 00000000000..73160cde7f1 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/SnippetsTask.groovy @@ -0,0 +1,299 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle + +import org.gradle.api.DefaultTask +import org.gradle.api.InvalidUserDataException +import org.gradle.api.file.ConfigurableFileTree +import org.gradle.api.tasks.InputFiles +import org.gradle.api.tasks.TaskAction + +import java.nio.file.Path +import java.util.regex.Matcher + +/** + * A task which will run a closure on each snippet in the documentation. + */ +public class SnippetsTask extends DefaultTask { + private static final String SCHAR = /(?:\\\/|[^\/])/ + private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\// + private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/ + private static final String SKIP = /skip:([^\]]+)/ + private static final String SETUP = /setup:([^ \]]+)/ + private static final String TEST_SYNTAX = + /(?:$CATCH|$SUBSTITUTION|$SKIP|(continued)|$SETUP) ?/ + + /** + * Action to take on each snippet. Called with a single parameter, an + * instance of Snippet. + */ + Closure perSnippet + + /** + * The docs to scan. Defaults to every file in the directory exception the + * build.gradle file because that is appropriate for Elasticsearch's docs + * directory. + */ + @InputFiles + ConfigurableFileTree docs = project.fileTree(project.projectDir) { + // No snippets in the build file + exclude 'build.gradle' + } + + @TaskAction + public void executeTask() { + /* + * Walks each line of each file, building snippets as it encounters + * the lines that make up the snippet. 
+ */ + for (File file: docs) { + String lastLanguage + int lastLanguageLine + Snippet snippet = null + StringBuilder contents = null + List substitutions = null + Closure emit = { + snippet.contents = contents.toString() + contents = null + if (substitutions != null) { + substitutions.each { String pattern, String subst -> + /* + * $body is really common but it looks like a + * backreference so we just escape it here to make the + * tests cleaner. + */ + subst = subst.replace('$body', '\\$body') + // \n is a new line.... + subst = subst.replace('\\n', '\n') + snippet.contents = snippet.contents.replaceAll( + pattern, subst) + } + substitutions = null + } + perSnippet(snippet) + snippet = null + } + file.eachLine('UTF-8') { String line, int lineNumber -> + Matcher matcher + if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet + if (snippet == null) { + Path path = docs.dir.toPath().relativize(file.toPath()) + snippet = new Snippet(path: path, start: lineNumber) + if (lastLanguageLine == lineNumber - 1) { + snippet.language = lastLanguage + } + } else { + snippet.end = lineNumber + } + return + } + matcher = line =~ /\[source,(\w+)]\s*/ + if (matcher.matches()) { + lastLanguage = matcher.group(1) + lastLanguageLine = lineNumber + return + } + if (line ==~ /\/\/ AUTOSENSE\s*/) { + if (snippet == null) { + throw new InvalidUserDataException("AUTOSENSE not " + + "paired with a snippet at $file:$lineNumber") + } + snippet.autoSense = true + return + } + matcher = line =~ /\/\/ TEST(\[(.+)\])?\s*/ + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException("TEST not " + + "paired with a snippet at $file:$lineNumber") + } + snippet.test = true + if (matcher.group(2) != null) { + String loc = "$file:$lineNumber" + parse(loc, matcher.group(2), TEST_SYNTAX) { + if (it.group(1) != null) { + snippet.catchPart = it.group(1) + return + } + if (it.group(2) != null) { + if (substitutions == null) { + substitutions = [] + } + substitutions.add([it.group(2), it.group(3)]) + return + } + if (it.group(4) != null) { + snippet.skipTest = it.group(4) + return + } + if (it.group(5) != null) { + snippet.continued = true + return + } + if (it.group(6) != null) { + snippet.setup = it.group(6) + return + } + throw new InvalidUserDataException( + "Invalid test marker: $line") + } + } + return + } + matcher = line =~ /\/\/ TESTRESPONSE(\[(.+)\])?\s*/ + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException("TESTRESPONSE not " + + "paired with a snippet at $file:$lineNumber") + } + snippet.testResponse = true + if (matcher.group(2) != null) { + substitutions = [] + String loc = "$file:$lineNumber" + parse(loc, matcher.group(2), /$SUBSTITUTION ?/) { + substitutions.add([it.group(1), it.group(2)]) + } + } + return + } + if (line ==~ /\/\/ TESTSETUP\s*/) { + snippet.testSetup = true + return + } + if (snippet == null) { + // Outside + return + } + if (snippet.end == Snippet.NOT_FINISHED) { + // Inside + if (contents == null) { + contents = new StringBuilder() + } + // We don't need the annotations + line = line.replaceAll(/<\d+>/, '') + // Nor any trailing spaces + line = line.replaceAll(/\s+$/, '') + contents.append(line).append('\n') + return + } + // Just finished + emit() + } + if (snippet != null) emit() + } + } + + static class Snippet { + static final int NOT_FINISHED = -1 + + /** + * Path to the file containing this snippet. Relative to docs.dir of the + * SnippetsTask that created it. 
+ */ + Path path + int start + int end = NOT_FINISHED + String contents + + boolean autoSense = false + boolean test = false + boolean testResponse = false + boolean testSetup = false + String skipTest = null + boolean continued = false + String language = null + String catchPart = null + String setup = null + + @Override + public String toString() { + String result = "$path[$start:$end]" + if (language != null) { + result += "($language)" + } + if (autoSense) { + result += '// AUTOSENSE' + } + if (test) { + result += '// TEST' + if (catchPart) { + result += "[catch: $catchPart]" + } + if (skipTest) { + result += "[skip=$skipTest]" + } + if (continued) { + result += '[continued]' + } + if (setup) { + result += "[setup:$setup]" + } + } + if (testResponse) { + result += '// TESTRESPONSE' + } + if (testSetup) { + result += '// TESTSETUP' + } + return result + } + } + + /** + * Repeatedly match the pattern to the string, calling the closure with the + * matchers each time there is a match. If there are characters that don't + * match then blow up. If the closure takes two parameters then the second + * one is "is this the last match?". + */ + protected parse(String location, String s, String pattern, Closure c) { + if (s == null) { + return // Silly null, only real stuff gets to match! + } + Matcher m = s =~ pattern + int offset = 0 + Closure extraContent = { message -> + StringBuilder cutOut = new StringBuilder() + cutOut.append(s[offset - 6..offset - 1]) + cutOut.append('*') + cutOut.append(s[offset..Math.min(offset + 5, s.length() - 1)]) + String cutOutNoNl = cutOut.toString().replace('\n', '\\n') + throw new InvalidUserDataException("$location: Extra content " + + "$message ('$cutOutNoNl') matching [$pattern]: $s") + } + while (m.find()) { + if (m.start() != offset) { + extraContent("between [$offset] and [${m.start()}]") + } + offset = m.end() + if (c.maximumNumberOfParameters == 1) { + c(m) + } else { + c(m, offset == s.length()) + } + } + if (offset == 0) { + throw new InvalidUserDataException("$location: Didn't match " + + "$pattern: $s") + } + if (offset != s.length()) { + extraContent("after [$offset]") + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b1c8b03fe37..0635a532a58 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -92,7 +92,7 @@ public class MapperService extends AbstractIndexComponent { Setting.longSetting("index.mapping.depth.limit", 20L, 1, Property.Dynamic, Property.IndexScope); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = - Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.IndexScope); + Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.Dynamic, Property.IndexScope); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" diff --git a/docs/README.asciidoc b/docs/README.asciidoc index 3146d76f1eb..7317ce71a82 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -1,4 +1,47 @@ -The Elasticsearch docs are in AsciiDoc format and can be built using the Elasticsearch documentation build process +The Elasticsearch docs are in AsciiDoc format and can be built using the +Elasticsearch documentation 
build process. See: https://github.com/elastic/docs +Snippets marked with `// AUTOSENSE` are automatically annotated with "VIEW IN +SENSE" in the documentation and are automatically tested by the command +`gradle :docs:check`. By default `// AUTOSENSE` snippet runs as its own isolated +test. You can manipulate the test execution in the following ways: + +* `// TEST`: Explicitly marks a snippet as a test. Snippets marked this way +are tests even if they don't have `// AUTOSENSE`. + * `// TEST[s/foo/bar/]`: Replace `foo` with `bar` in the test. This should be + used sparingly because it makes the test "lie". Sometimes, though, you can use + it to make the tests more clear. + * `// TEST[catch:foo]`: Used to expect errors in the requests. Replace `foo` + with `request` to expect a 400 error, for example. If the snippet contains + multiple requests then only the last request will expect the error. + * `// TEST[continued]`: Continue the test started in the last snippet. Between + tests the nodes are cleaned: indexes are removed, etc. This will prevent that. + This is really useful when you have text and snippets that work together to + tell the story of some use case because it merges the snippets (and thus the + use case) into one big test. + * `// TEST[skip:reason]`: Skip this test. Replace `reason` with the actual + reason to skip the test. Snippets without `// TEST` or `// AUTOSENSE` aren't + considered tests anyway but this is useful for explicitly documenting the + reason why the test shouldn't be run. + * `// TEST[setup:name]`: Run some setup code before running the snippet. This + is useful for creating and populating indexes used in the snippet. The setup + code is defined in `docs/build.gradle`. +* `// TESTRESPONSE`: Matches this snippet against the body of the response of + the last test. If the response is JSON then order is ignored. With + `// TEST[continued]` you can make tests that contain multiple command snippets + and multiple response snippets. + * `// TESTRESPONSE[s/foo/bar/]`: Substitutions. See `// TEST[s/foo/bar]`. +* `// TESTSETUP`: Marks this snippet as the "setup" for all other snippets in + this file. This is a somewhat natural way of structuring documentation. You + say "this is the data we use to explain this feature" then you add the + snippet that you mark `// TESTSETUP` and then every snippet will turn into + a test that runs the setup snippet first. See the "painless" docs for a file + that puts this to good use. This is fairly similar to `// TEST[setup:name]` + but rather than the setup defined in `docs/build.gradle` the setup is defined + right in the documentation file. + +Any place you can use json you can use elements like `$body.path.to.thing` +which is replaced on the fly with the contents of the thing at `path.to.thing` +in the last response. diff --git a/docs/build.gradle b/docs/build.gradle new file mode 100644 index 00000000000..be4b10b1010 --- /dev/null +++ b/docs/build.gradle @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.gradle.SnippetsTask +import org.elasticsearch.gradle.SnippetsTask.Snippet +import org.elasticsearch.gradle.RestTestsFromSnippetsTask + +apply plugin: 'elasticsearch.rest-test' + +task listSnippets(type: SnippetsTask) { + group 'Docs' + description 'List each snippet' + perSnippet { println(it) } +} + +task listAutoSenseCandidates(type: SnippetsTask) { + group 'Docs' + description 'List snippets that probably should be marked // AUTOSENSE' + perSnippet { + if ( + it.autoSense // Already marked, nothing to do + || it.testResponse // Only commands are autosense + ) { + return + } + List languages = [ + 'js', 'json', // These languages should almost always be marked autosense + 'sh', 'shell', // These are often curl commands that should be converted + ] + if (false == languages.contains(it.language)) { + return + } + println(it) + } +} + +task buildRestTests(type: RestTestsFromSnippetsTask) { + docs = fileTree(project.projectDir) { + // No snippets in here! + exclude 'build.gradle' + // Remove plugins because they aren't installed during this test. Yet? + exclude 'plugins' + // This file simply doesn't pass yet. We should figure out how to fix it. + exclude 'reference/modules/snapshots.asciidoc' + } + Closure setupTwitter = { String name, int count -> + setups[name] = ''' + - do: + bulk: + index: twitter + type: tweet + refresh: true + body: |''' + for (int i = 0; i < count; i++) { + setups[name] += """ + {"index":{}} + {"msg": "some message with the number $i", "date": $i}""" + } + } + setupTwitter('twitter', 5) + setupTwitter('big_twitter', 120) +} + +integTest { + cluster { + setting 'script.inline', 'true' + } +} diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 873e6e3102f..b012d3a9673 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -33,9 +33,7 @@ before running each example. 
[source,js] -------------------------------------------------- -DELETE test - -PUT /test +PUT test { "settings": { "analysis": { @@ -49,8 +47,9 @@ PUT /test } } -GET /test/_analyze?analyzer=whitespace&text=foo,bar baz +GET _cluster/health?wait_for_status=yellow +GET test/_analyze?analyzer=whitespace&text=foo,bar baz # "foo,bar", "baz" -------------------------------------------------- // AUTOSENSE @@ -60,9 +59,7 @@ GET /test/_analyze?analyzer=whitespace&text=foo,bar baz [source,js] -------------------------------------------------- -DELETE test - -PUT /test +PUT test { "settings": { "analysis": { @@ -76,10 +73,12 @@ PUT /test } } -GET /test/_analyze?analyzer=nonword&text=foo,bar baz +GET _cluster/health?wait_for_status=yellow + +GET test/_analyze?analyzer=nonword&text=foo,bar baz # "foo,bar baz" becomes "foo", "bar", "baz" -GET /test/_analyze?analyzer=nonword&text=type_1-type_4 +GET test/_analyze?analyzer=nonword&text=type_1-type_4 # "type_1","type_4" -------------------------------------------------- // AUTOSENSE @@ -90,9 +89,7 @@ GET /test/_analyze?analyzer=nonword&text=type_1-type_4 [source,js] -------------------------------------------------- -DELETE test - -PUT /test?pretty=1 +PUT test?pretty=1 { "settings": { "analysis": { @@ -106,7 +103,7 @@ PUT /test?pretty=1 } } -GET /test/_analyze?analyzer=camel&text=MooseX::FTPClass2_beta +GET test/_analyze?analyzer=camel&text=MooseX::FTPClass2_beta # "moose","x","ftp","class","2","beta" -------------------------------------------------- // AUTOSENSE diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index 66d29f2bcfd..0ac888c953f 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -11,9 +11,9 @@ executing on one or more nodes in the cluster. [source,js] -------------------------------------------------- -GET /_tasks <1> -GET /_tasks?nodes=nodeId1,nodeId2 <2> -GET /_tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3> +GET _tasks <1> +GET _tasks?nodes=nodeId1,nodeId2 <2> +GET _tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3> -------------------------------------------------- // AUTOSENSE @@ -56,7 +56,6 @@ The result will look similar to the following: } } } - -------------------------------------------------- It is also possible to retrieve information for a particular task, or all children of a particular @@ -64,8 +63,8 @@ tasks using the following two commands: [source,js] -------------------------------------------------- -GET /_tasks/taskId1 -GET /_tasks?parent_task_id=parentTaskId1 +GET _tasks/taskId:1 +GET _tasks?parent_task_id=parentTaskId:1 -------------------------------------------------- // AUTOSENSE @@ -74,7 +73,7 @@ block for 10 seconds or until the task with id `oTUltX4IQMOUUVeiohTt8A:12345` is [source,js] -------------------------------------------------- -GET /_tasks/oTUltX4IQMOUUVeiohTt8A:12345?wait_for_completion=true&timeout=10s +GET _tasks/oTUltX4IQMOUUVeiohTt8A:12345?wait_for_completion=true&timeout=10s -------------------------------------------------- // AUTOSENSE @@ -83,7 +82,7 @@ as the standard list tasks command. 
[source,js] -------------------------------------------------- -GET /_cat/tasks +GET _cat/tasks -------------------------------------------------- // AUTOSENSE @@ -94,7 +93,7 @@ If a long-running task supports cancellation, it can be cancelled by the followi [source,js] -------------------------------------------------- -POST /_tasks/taskId1/_cancel +POST _tasks/taskId:1/_cancel -------------------------------------------------- // AUTOSENSE @@ -104,7 +103,7 @@ nodes `nodeId1` and `nodeId2`. [source,js] -------------------------------------------------- -POST /_tasks/_cancel?node_id=nodeId1,nodeId2&actions=*reindex +POST _tasks/_cancel?node_id=nodeId1,nodeId2&actions=*reindex -------------------------------------------------- // AUTOSENSE @@ -117,6 +116,6 @@ The following command will change the grouping to parent tasks: [source,js] -------------------------------------------------- -GET /_tasks?group_by=parents +GET _tasks?group_by=parents -------------------------------------------------- // AUTOSENSE diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 27ac85b9595..0acb148e82a 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -7,12 +7,14 @@ into the "twitter" index, under a type called "tweet" with an id of 1: [source,js] -------------------------------------------------- -$ curl -XPUT 'http://localhost:9200/twitter/tweet/1' -d '{ +PUT twitter/tweet/1 +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE The result of the above index operation is: @@ -20,9 +22,9 @@ The result of the above index operation is: -------------------------------------------------- { "_shards" : { - "total" : 10, + "total" : 2, "failed" : 0, - "successful" : 10 + "successful" : 2 }, "_index" : "twitter", "_type" : "tweet", @@ -31,6 +33,7 @@ The result of the above index operation is: "created" : true } -------------------------------------------------- +// TESTRESPONSE[s/"successful" : 2/"successful" : 1/] The `_shards` header provides information about the replication process of the index operation. @@ -90,10 +93,13 @@ meantime (when reading in order to update, it is recommended to set [source,js] -------------------------------------------------- -curl -XPUT 'localhost:9200/twitter/tweet/1?version=2' -d '{ +PUT twitter/tweet/1?version=2 +{ "message" : "elasticsearch now has versioning support, double cool!" -}' +} -------------------------------------------------- +// AUTOSENSE +// TEST[catch: conflict] *NOTE:* versioning is completely real time, and is not affected by the near real time aspects of search operations. 
If no version is provided, @@ -160,23 +166,27 @@ Here is an example of using the `op_type` parameter: [source,js] -------------------------------------------------- -$ curl -XPUT 'http://localhost:9200/twitter/tweet/1?op_type=create' -d '{ +PUT twitter/tweet/1?op_type=create +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE Another option to specify `create` is to use the following uri: [source,js] -------------------------------------------------- -$ curl -XPUT 'http://localhost:9200/twitter/tweet/1/_create' -d '{ +PUT twitter/tweet/1/_create +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE [float] === Automatic ID Generation @@ -188,18 +198,25 @@ will automatically be set to `create`. Here is an example (note the [source,js] -------------------------------------------------- -$ curl -XPOST 'http://localhost:9200/twitter/tweet/' -d '{ +POST twitter/tweet/ +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE The result of the above index operation is: [source,js] -------------------------------------------------- { + "_shards" : { + "total" : 2, + "failed" : 0, + "successful" : 2 + }, "_index" : "twitter", "_type" : "tweet", "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32", @@ -207,6 +224,7 @@ The result of the above index operation is: "created" : true } -------------------------------------------------- +// TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/] [float] [[index-routing]] @@ -219,12 +237,14 @@ on a per-operation basis using the `routing` parameter. For example: [source,js] -------------------------------------------------- -$ curl -XPOST 'http://localhost:9200/twitter/tweet?routing=kimchy' -d '{ +POST twitter/tweet?routing=kimchy +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE In the example above, the "tweet" document is routed to a shard based on the `routing` parameter provided: "kimchy". @@ -245,10 +265,24 @@ For example: [source,js] -------------------------------------------------- -$ curl -XPUT localhost:9200/blogs/blog_tag/1122?parent=1111 -d '{ +PUT blogs +{ + "mappings": { + "tag_parent": {}, + "blog_tag": { + "_parent": { + "type": "tag_parent" + } + } + } +} + +PUT blogs/blog_tag/1122?parent=1111 +{ "tag" : "something" -}' +} -------------------------------------------------- +// AUTOSENSE When indexing a child document, the routing value is automatically set to be the same as its parent, unless the routing value is explicitly @@ -266,11 +300,13 @@ parameter. 
For example: [source,js] -------------------------------------------------- -$ curl -XPUT localhost:9200/twitter/tweet/1?timestamp=2009-11-15T14%3A12%3A12 -d '{ +PUT twitter/tweet/1?timestamp=2009-11-15T14:12:12 +{ "user" : "kimchy", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE If the `timestamp` value is not provided externally or in the `_source`, the `timestamp` will be automatically set to the date the document was @@ -295,28 +331,23 @@ as shown in the following examples: [source,js] -------------------------------------------------- -curl -XPUT 'http://localhost:9200/twitter/tweet/1?ttl=86400000' -d '{ +PUT twitter/tweet/1?ttl=86400000ms +{ "user": "kimchy", "message": "Trying out elasticsearch, so far so good?" -}' +} -------------------------------------------------- +// AUTOSENSE [source,js] -------------------------------------------------- -curl -XPUT 'http://localhost:9200/twitter/tweet/1?ttl=1d' -d '{ +PUT twitter/tweet/1?ttl=1d +{ "user": "kimchy", "message": "Trying out elasticsearch, so far so good?" -}' --------------------------------------------------- - -[source,js] --------------------------------------------------- -curl -XPUT 'http://localhost:9200/twitter/tweet/1' -d '{ - "_ttl": "1d", - "user": "kimchy", - "message": "Trying out elasticsearch, so far so good?" -}' +} -------------------------------------------------- +// AUTOSENSE More information can be found on the <>. @@ -392,9 +423,11 @@ to 5 minutes: [source,js] -------------------------------------------------- -$ curl -XPUT 'http://localhost:9200/twitter/tweet/1?timeout=5m' -d '{ +PUT twitter/tweet/1?timeout=5m +{ "user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch" -}' +} -------------------------------------------------- +// AUTOSENSE diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 8742e52c5c8..513281c07e3 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -8,7 +8,7 @@ This will copy documents from the `twitter` index into the `new_twitter` index: [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { "index": "twitter" @@ -19,20 +19,29 @@ POST /_reindex } -------------------------------------------------- // AUTOSENSE +// TEST[setup:big_twitter] That will return something like this: [source,js] -------------------------------------------------- { - "took" : 639, - "updated": 112, - "batches": 130, + "took" : 147, + "timed_out": false, + "created": 120, + "updated": 0, + "batches": 2, "version_conflicts": 0, - "failures" : [ ], - "created": 12344 + "noops": 0, + "retries": 0, + "throttled_millis": 0, + "requests_per_second": "unlimited", + "throttled_until_millis": 0, + "total": 120, + "failures" : [ ] } -------------------------------------------------- +// TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] Just like <>, `_reindex` gets a snapshot of the source index but its target must be a **different** index so @@ -44,7 +53,7 @@ the same type and id: [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { "index": "twitter" @@ -56,6 +65,7 @@ POST /_reindex } -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] Setting `version_type` to `external` will cause Elasticsearch to preserve the `version` from the source, create any documents that are missing, and update @@ 
-64,7 +74,7 @@ in the source index:

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "source": {
    "index": "twitter"
@@ -76,6 +86,7 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

Setting `op_type` to `create` will cause `_reindex` to only create missing
documents in the target index. All existing documents will cause a version
@@ -83,7 +94,7 @@ conflict:

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "source": {
    "index": "twitter"
@@ -95,13 +106,14 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

By default version conflicts abort the `_reindex` process but you can just
count them by setting `"conflicts": "proceed"` in the request body:

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "conflicts": "proceed",
  "source": {
@@ -114,13 +126,14 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

You can limit the documents by adding a type to the `source` or by adding a
query. This will only copy ++tweet++'s made by `kimchy` into `new_twitter`:

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "source": {
    "index": "twitter",
@@ -137,6 +150,7 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

`index` and `type` in `source` can both be lists, allowing you to copy from
lots of sources in one request. This will copy documents from the `tweet` and
@@ -148,7 +162,7 @@ which document will survive because the iteration order isn't well defined.

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "source": {
    "index": ["twitter", "blog"],
@@ -160,6 +174,7 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[s/^/PUT twitter\nPUT blog\n/]

It's also possible to limit the number of processed documents by setting
`size`. This will only copy a single document from `twitter` to
@@ -167,7 +182,7 @@ It's also possible to limit the number of processed documents by setting

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "size": 1,
  "source": {
@@ -179,6 +194,7 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

If you want a particular set of documents from the twitter index you'll
need to sort. Sorting makes the scroll less efficient but in some contexts
@@ -187,7 +203,7 @@ This will copy 10000 documents from `twitter` into `new_twitter`:

[source,js]
--------------------------------------------------
-POST /_reindex
+POST _reindex
{
  "size": 10000,
  "source": {
@@ -200,6 +216,7 @@ POST /_reindex
}
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

Like `_update_by_query`, `_reindex` supports a script that modifies the
document. Unlike `_update_by_query`, the script is allowed to modify the
@@ -207,21 +224,22 @@ document's metadata.
This example bumps the version of the source document: [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { - "index": "twitter", + "index": "twitter" }, "dest": { "index": "new_twitter", "version_type": "external" - } + }, "script": { - "internal": "if (ctx._source.foo == 'bar') {ctx._version++; ctx._source.remove('foo')}" + "script": "if (ctx._source.foo == 'bar') {ctx._version++; ctx._source.remove('foo')}" } } -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] Think of the possibilities! Just be careful! With great power.... You can change: @@ -264,7 +282,7 @@ routing set to `cat`. [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { "index": "source", @@ -281,13 +299,14 @@ POST /_reindex } -------------------------------------------------- // AUTOSENSE +// TEST[s/^/PUT source\n/] By default `_reindex` uses scroll batches of 100. You can change the batch size with the `size` field in the `source` element: [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { "index": "source", @@ -300,13 +319,14 @@ POST /_reindex } -------------------------------------------------- // AUTOSENSE +// TEST[s/^/PUT source\n/] Reindex can also use the <> feature by specifying a `pipeline` like this: [source,js] -------------------------------------------------- -POST /_reindex +POST _reindex { "source": { "index": "source" @@ -318,6 +338,7 @@ POST /_reindex } -------------------------------------------------- // AUTOSENSE +// TEST[s/^/PUT source\n/] [float] === URL Parameters @@ -414,7 +435,7 @@ While Reindex is running you can fetch their status using the [source,js] -------------------------------------------------- -GET /_tasks/?pretty&detailed=true&actions=*reindex +GET _tasks/?pretty&detailed=true&actions=*reindex -------------------------------------------------- // AUTOSENSE @@ -473,7 +494,7 @@ Any Reindex can be canceled using the <>: [source,js] -------------------------------------------------- -POST /_tasks/{task_id}/_cancel +POST _tasks/taskid:1/_cancel -------------------------------------------------- // AUTOSENSE @@ -492,7 +513,7 @@ the `_rethrottle` API: [source,js] -------------------------------------------------- -POST /_reindex/{task_id}/_rethrottle?requests_per_second=unlimited +POST _reindex/taskid:1/_rethrottle?requests_per_second=unlimited -------------------------------------------------- // AUTOSENSE @@ -540,6 +561,7 @@ POST _reindex?pretty } -------------------------------------------------- // AUTOSENSE +// TEST[continued] Now you can get the new document: @@ -548,15 +570,24 @@ Now you can get the new document: GET test2/test/1?pretty -------------------------------------------------- // AUTOSENSE +// TEST[continued] and it'll look like: [source,js] -------------------------------------------------- { - "text": "words words", - "tag": "foo" + "found": true, + "_id": "1", + "_index": "test2", + "_type": "test", + "_version": 1, + "_source": { + "text": "words words", + "tag": "foo" + } } -------------------------------------------------- +// TESTRESPONSE Or you can search by `tag` or whatever you want. diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index e307d53e8f4..cb0c806f3a4 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -10,22 +10,31 @@ mapping change. 
Here is the API: [source,js] -------------------------------------------------- -POST /twitter/_update_by_query?conflicts=proceed +POST twitter/_update_by_query?conflicts=proceed -------------------------------------------------- // AUTOSENSE +// TEST[setup:big_twitter] That will return something like this: [source,js] -------------------------------------------------- { - "took" : 639, - "updated": 1235, - "batches": 13, - "version_conflicts": 2, + "took" : 147, + "timed_out": false, + "updated": 120, + "batches": 2, + "version_conflicts": 0, + "noops": 0, + "retries": 0, + "throttled_millis": 0, + "requests_per_second": "unlimited", + "throttled_until_millis": 0, + "total": 120, "failures" : [ ] } -------------------------------------------------- +// TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] `_update_by_query` gets a snapshot of the index when it starts and indexes what it finds using `internal` versioning. That means that you'll get a version @@ -53,9 +62,10 @@ will only update `tweet`s from the `twitter` index: [source,js] -------------------------------------------------- -POST /twitter/tweet/_update_by_query?conflicts=proceed +POST twitter/tweet/_update_by_query?conflicts=proceed -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] You can also limit `_update_by_query` using the <>. This will update all documents from the @@ -63,7 +73,7 @@ You can also limit `_update_by_query` using the [source,js] -------------------------------------------------- -POST /twitter/_update_by_query?conflicts=proceed +POST twitter/_update_by_query?conflicts=proceed { "query": { <1> "term": { @@ -73,6 +83,7 @@ POST /twitter/_update_by_query?conflicts=proceed } -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] <1> The query must be passed as a value to the `query` key, in the same way as the <>. You can also use the `q` @@ -86,7 +97,7 @@ will increment the `likes` field on all of kimchy's tweets: [source,js] -------------------------------------------------- -POST /twitter/_update_by_query +POST twitter/_update_by_query { "script": { "inline": "ctx._source.likes++" @@ -99,6 +110,7 @@ POST /twitter/_update_by_query } -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] Just as in <> you can set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes. That will cause @@ -119,36 +131,50 @@ types at once, just like the search API: [source,js] -------------------------------------------------- -POST /twitter,blog/tweet,post/_update_by_query +POST twitter,blog/tweet,post/_update_by_query -------------------------------------------------- // AUTOSENSE +// TEST[s/^/PUT twitter\nPUT blog\n/] If you provide `routing` then the routing is copied to the scroll query, limiting the process to the shards that match that routing value: [source,js] -------------------------------------------------- -POST /twitter/_update_by_query?routing=1 +POST twitter/_update_by_query?routing=1 -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] By default `_update_by_query` uses scroll batches of 100. 
You can change the batch size with the `scroll_size` URL parameter: [source,js] -------------------------------------------------- -POST /twitter/_update_by_query?scroll_size=1000 +POST twitter/_update_by_query?scroll_size=1000 -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] `_update_by_query` can also use the <> feature by specifying a `pipeline` like this: [source,js] -------------------------------------------------- -POST /twitter/_update_by_query?pipeline=some_ingest_pipeline +PUT _ingest/pipeline/set-foo +{ + "description" : "sets foo", + "processors" : [ { + "set" : { + "field": "foo", + "value": "bar" + } + } ] +} +POST twitter/_update_by_query?pipeline=set-foo -------------------------------------------------- // AUTOSENSE +// TEST[setup:twitter] [float] === URL Parameters @@ -240,7 +266,7 @@ While Update By Query is running you can fetch their status using the [source,js] -------------------------------------------------- -POST /_tasks/?pretty&detailed=true&action=*byquery +GET _tasks/?pretty&detailed=true&action=*byquery -------------------------------------------------- // AUTOSENSE @@ -299,7 +325,7 @@ Any Update By Query can be canceled using the <>: [source,js] -------------------------------------------------- -POST /_tasks/{task_id}/_cancel +POST _tasks/taskid:1/_cancel -------------------------------------------------- // AUTOSENSE @@ -318,7 +344,7 @@ using the `_rethrottle` API: [source,js] -------------------------------------------------- -POST /_update_by_query/{task_id}/_rethrottle?requests_per_second=unlimited +POST _update_by_query/taskid:1/_rethrottle?requests_per_second=unlimited -------------------------------------------------- // AUTOSENSE @@ -356,12 +382,12 @@ POST test/test?refresh { "text": "words words", "flag": "bar" -}' +} POST test/test?refresh { "text": "words words", "flag": "foo" -}' +} PUT test/_mapping/test <2> { "properties": { @@ -391,6 +417,7 @@ POST test/_search?filter_path=hits.total } -------------------------------------------------- // AUTOSENSE +// TEST[continued] [source,js] -------------------------------------------------- @@ -400,6 +427,7 @@ POST test/_search?filter_path=hits.total } } -------------------------------------------------- +// TESTRESPONSE But you can issue an `_update_by_query` request to pick up the new mapping: @@ -416,6 +444,7 @@ POST test/_search?filter_path=hits.total } -------------------------------------------------- // AUTOSENSE +// TEST[continued] [source,js] -------------------------------------------------- @@ -425,5 +454,6 @@ POST test/_search?filter_path=hits.total } } -------------------------------------------------- +// TESTRESPONSE You can do the exact same thing when adding a field to a multifield. 
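A minimal sketch of that multifield case (the `flag.raw` sub-field and the request parameters here are illustrative, not part of the walkthrough above): add a `keyword` sub-field to an existing field, then sweep the index so existing documents pick it up.

[source,js]
--------------------------------------------------
PUT test/_mapping/test
{
  "properties": {
    "flag": {
      "type": "text",
      "fields": {
        "raw": { "type": "keyword" } <1>
      }
    }
  }
}

POST test/_update_by_query?refresh&conflicts=proceed <2>
--------------------------------------------------
// AUTOSENSE
<1> `flag.raw` is the newly added sub-field.
<2> Reindexing every document in place populates `flag.raw` for data indexed before the mapping change.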
diff --git a/docs/reference/index-modules/allocation/delayed.asciidoc b/docs/reference/index-modules/allocation/delayed.asciidoc index baaa3cb944d..6bedba95586 100644 --- a/docs/reference/index-modules/allocation/delayed.asciidoc +++ b/docs/reference/index-modules/allocation/delayed.asciidoc @@ -38,7 +38,7 @@ This setting can be updated on a live index (or on all indices): [source,js] ------------------------------ -PUT /_all/_settings +PUT _all/_settings { "settings": { "index.unassigned.node_left.delayed_timeout": "5m" @@ -46,6 +46,7 @@ PUT /_all/_settings } ------------------------------ // AUTOSENSE +// TEST[s/^/PUT test\n/] With delayed allocation enabled, the above scenario changes to look like this: @@ -82,6 +83,7 @@ can be viewed with the <>: ------------------------------ GET _cluster/health <1> ------------------------------ +// AUTOSENSE <1> This request will return a `delayed_unassigned_shards` value. ==== Removing a node permanently @@ -92,7 +94,7 @@ the missing shards immediately, just update the timeout to zero: [source,js] ------------------------------ -PUT /_all/_settings +PUT _all/_settings { "settings": { "index.unassigned.node_left.delayed_timeout": "0" @@ -100,5 +102,6 @@ PUT /_all/_settings } ------------------------------ // AUTOSENSE +// TEST[s/^/PUT test\n/] You can reset the timeout as soon as the missing shards have started to recover. diff --git a/docs/reference/index-modules/allocation/filtering.asciidoc b/docs/reference/index-modules/allocation/filtering.asciidoc index 29ce2f535a3..f5b73a9f78e 100644 --- a/docs/reference/index-modules/allocation/filtering.asciidoc +++ b/docs/reference/index-modules/allocation/filtering.asciidoc @@ -23,6 +23,7 @@ These metadata attributes can be used with the group of nodes. For instance, we can move the index `test` to either `big` or `medium` nodes as follows: + [source,js] ------------------------ PUT test/_settings @@ -31,6 +32,7 @@ PUT test/_settings } ------------------------ // AUTOSENSE +// TEST[s/^/PUT test\n/] Alternatively, we can move the index `test` away from the `small` nodes with an `exclude` rule: @@ -43,6 +45,7 @@ PUT test/_settings } ------------------------ // AUTOSENSE +// TEST[s/^/PUT test\n/] Multiple rules can be specified, in which case all conditions must be satisfied. For instance, we could move the index `test` to `big` nodes in @@ -57,6 +60,7 @@ PUT test/_settings } ------------------------ // AUTOSENSE +// TEST[s/^/PUT test\n/] NOTE: If some conditions cannot be satisfied then shards will not be moved. 
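To double-check what is currently in force, the allocation filter settings can be read back (a minimal sketch, assuming the `test` index from the examples above):

[source,js]
------------------------
GET test/_settings
------------------------
// AUTOSENSE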
@@ -97,3 +101,4 @@ PUT test/_settings
}
------------------------
// AUTOSENSE
+// TEST[skip:indexes don't assign]
diff --git a/docs/reference/index-modules/allocation/prioritization.asciidoc b/docs/reference/index-modules/allocation/prioritization.asciidoc
index b3307e90b79..149e43fd134 100644
--- a/docs/reference/index-modules/allocation/prioritization.asciidoc
+++ b/docs/reference/index-modules/allocation/prioritization.asciidoc
@@ -53,3 +53,4 @@ PUT index_4/_settings
}
------------------------------
// AUTOSENSE
+// TEST[continued]
diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc
index 47d73b7d3dc..3597ccc51e5 100644
--- a/docs/reference/indices/analyze.asciidoc
+++ b/docs/reference/indices/analyze.asciidoc
@@ -112,7 +112,7 @@ experimental[The format of the additional detail information is experimental and

[source,js]
--------------------------------------------------
-GET test/_analyze
+GET _analyze
{
  "tokenizer" : "standard",
  "token_filter" : ["snowball"],
@@ -172,4 +172,3 @@ The request returns the following result:
}
--------------------------------------------------
<1> Output only the "keyword" attribute, since "attributes" was specified in the request.
-
diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc
index b02fb15135b..0279ee3abcf 100644
--- a/docs/reference/indices/flush.asciidoc
+++ b/docs/reference/indices/flush.asciidoc
@@ -10,9 +10,10 @@ trigger flush operations as required in order to clear memory.

[source,js]
--------------------------------------------------
-POST /twitter/_flush
+POST twitter/_flush
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

[float]
[[flush-parameters]]
@@ -23,7 +24,7 @@ The flush API accepts the following request parameters:

[horizontal]
`wait_if_ongoing`:: If set to `true` the flush operation will block until the
flush can be executed if another flush operation is already executing.
-The default is `false` and will cause an exception to be thrown on
+The default is `false` and will cause an exception to be thrown on the
shard level if another flush operation is already running.

`force`:: Whether a flush should be forced even if it is not necessarily needed, i.e.
@@ -40,11 +41,12 @@ or even on `_all` the indices.

[source,js]
--------------------------------------------------
-POST /kimchy,elasticsearch/_flush
+POST kimchy,elasticsearch/_flush

-POST /_flush
+POST _flush
--------------------------------------------------
// AUTOSENSE
+// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/]

[[indices-synced-flush]]
=== Synced Flush
@@ -72,9 +74,10 @@ the <> API:

[source,sh]
--------------------------------------------------
-GET /twitter/_stats/commit?level=shards
+GET twitter/_stats/commit?level=shards
--------------------------------------------------
// AUTOSENSE
+// TEST[s/^/PUT twitter\n/]

which returns something similar to:

@@ -136,9 +139,10 @@ NOTE: It is harmless to request a synced flush while there is ongoing indexing.

[source,sh]
--------------------------------------------------
-POST /twitter/_flush/synced
+POST twitter/_flush/synced
--------------------------------------------------
// AUTOSENSE
+// TEST[setup:twitter]

The response contains details about how many shards were successfully
sync-flushed and information about any failure.
@@ -149,18 +153,18 @@ sync-flushed:
--------------------------------------------------
{
  "_shards": {
-      "total": 4,
-      "successful": 4,
+      "total": 10,
+      "successful": 10,
      "failed": 0
  },
  "twitter": {
-      "total": 4,
-      "successful": 4,
+      "total": 10,
+      "successful": 10,
      "failed": 0
  }
}
--------------------------------------------------
-
+// TESTRESPONSE[s/"successful": 10/"successful": 5/]

Here is what it looks like when one shard group failed due to pending operations:

@@ -230,8 +234,8 @@ or even on `_all` the indices.

[source,js]
--------------------------------------------------
-POST /kimchy,elasticsearch/_flush/synced
+POST kimchy,elasticsearch/_flush/synced

-POST /_flush/synced
+POST _flush/synced
--------------------------------------------------
-// AUTOSENSE
\ No newline at end of file
+// AUTOSENSE
diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc
index cc94a08f626..70a5aa8bef0 100644
--- a/docs/reference/indices/put-mapping.asciidoc
+++ b/docs/reference/indices/put-mapping.asciidoc
@@ -135,9 +135,9 @@ exists in more than one type will throw an exception, unless you specify the
across all fields with the same name in the same index.

TIP: The only parameters which are exempt from this rule -- they can be set to
-different values on each field -- can be found in <>.
+different values on each field -- can be found in <>.

-For example:
+For example, this fails:

[source,js]
-----------------------------------
@@ -173,8 +173,17 @@ PUT my_index/_mapping/type_one <2>
    }
  }
}
+-----------------------------------
+// AUTOSENSE
+// TEST[catch:request]
+<1> Create an index with two types, both of which contain a `text` field which have the same mapping.
+<2> Trying to update the `search_analyzer` just for `type_one` throws an exception like `"Merge failed with failures..."`.

-PUT my_index/_mapping/type_one?update_all_types <3>
+But then running this succeeds:
+
+[source,js]
+-----------------------------------
+PUT my_index/_mapping/type_one?update_all_types <1>
{
  "properties": {
    "text": {
@@ -186,8 +195,5 @@ PUT my_index/_mapping/type_one?update_all_types <3>
  }
}
-----------------------------------
// AUTOSENSE
-<1> Create an index with two types, both of which contain a `text` field which have the same mapping.
-<2> Trying to update the `search_analyzer` just for `type_one` throws an exception like `"Merge failed with failures..."`.
-<3> Adding the `update_all_types` parameter updates the `text` field in `type_one` and `type_two`.
-
-
+// TEST[continued]
+<1> Adding the `update_all_types` parameter updates the `text` field in `type_one` and `type_two`.
diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc
index 9e97b0c5240..86bc77c52e4 100644
--- a/docs/reference/indices/templates.asciidoc
+++ b/docs/reference/indices/templates.asciidoc
@@ -13,7 +13,7 @@ For example:

[source,js]
--------------------------------------------------
-PUT /_template/template_1
+PUT _template/template_1
{
  "template": "te*",
  "settings": {
diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc
index c565f3b2047..767b928d205 100644
--- a/docs/reference/ingest.asciidoc
+++ b/docs/reference/ingest.asciidoc
@@ -25,15 +25,16 @@ tell the ingest node which pipeline to use. For example:

[source,js]
--------------------------------------------------
-PUT /my-index/my-type/my-id?pipeline=my_pipeline_id
+PUT my-index/my-type/my-id?pipeline=my_pipeline_id
{
-  ...
+ "foo": "bar" } -------------------------------------------------- // AUTOSENSE +// TEST[catch:request] See <> for more information about creating, adding, and deleting pipelines. -- -include::ingest/ingest-node.asciidoc[] \ No newline at end of file +include::ingest/ingest-node.asciidoc[] diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 7831ee86290..631e4bb40f7 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -41,10 +41,11 @@ PUT _ingest/pipeline/my-pipeline-id "description" : "describe pipeline", "processors" : [ { - "simple" : { - // settings + "set" : { + "field": "foo", + "value": "bar" } - }, + } // other processors ] } @@ -64,28 +65,31 @@ The get pipeline API returns pipelines based on ID. This API always returns a lo GET _ingest/pipeline/my-pipeline-id -------------------------------------------------- // AUTOSENSE +// TEST[continued] Example response: [source,js] -------------------------------------------------- { - "my-pipeline-id": { - "_source" : { - "description": "describe pipeline", - "processors": [ - { - "simple" : { - // settings - } - }, - // other processors - ] - }, - "_version" : 0 - } + "pipelines": [ { + "id": "my-pipeline-id", + "config": { + "description": "describe pipeline", + "processors": [ + { + "set" : { + "field": "foo", + "value": "bar" + } + } + // other processors + ] + } + } ] } -------------------------------------------------- +// TESTRESPONSE For each returned pipeline, the source and the version are returned. The version is useful for knowing which version of the pipeline the node has. @@ -101,6 +105,7 @@ The delete pipeline API deletes pipelines by ID. DELETE _ingest/pipeline/my-pipeline-id -------------------------------------------------- // AUTOSENSE +// TEST[continued] [[simulate-pipeline-api]] === Simulate Pipeline API @@ -674,9 +679,9 @@ Specifying `boolean` will set the field to true if its string value is equal to false if its string value is equal to `false` (ignore case), or it will throw an exception otherwise. Specifying `auto` will attempt to convert the string-valued `field` into the closest non-string type. -For example, a field whose value is `"true"` will be converted to its respective boolean type: `true`. And -a value of `"242.15"` will "automatically" be converted to `242.15` of type `float`. If a provided field cannot -be appropriately converted, the Convert Processor will still process successfully and leave the field value as-is. In +For example, a field whose value is `"true"` will be converted to its respective boolean type: `true`. And +a value of `"242.15"` will "automatically" be converted to `242.15` of type `float`. If a provided field cannot +be appropriately converted, the Convert Processor will still process successfully and leave the field value as-is. In such a case, `target_field` will still be updated with the unconverted field value. [[convert-options]] @@ -1320,5 +1325,3 @@ Converts a string to its uppercase equivalent. 
}
}
--------------------------------------------------
-
-
diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc
index 8ead0436978..70de0a2735b 100644
--- a/docs/reference/mapping.asciidoc
+++ b/docs/reference/mapping.asciidoc
@@ -140,6 +140,7 @@ PUT my_index <1>
      }
    },
    "blogpost": { <2>
+      "_all": { "enabled": false }, <3>
      "properties": { <4>
        "title": { "type": "text" }, <5>
        "body": { "type": "text" }, <5>
diff --git a/docs/reference/mapping/dynamic-mapping.asciidoc b/docs/reference/mapping/dynamic-mapping.asciidoc
index beb7d4360d4..81aa5858db3 100644
--- a/docs/reference/mapping/dynamic-mapping.asciidoc
+++ b/docs/reference/mapping/dynamic-mapping.asciidoc
@@ -46,12 +46,14 @@ setting to `false`, either by setting the default value in the

[source,js]
--------------------------------------------------
-PUT /_settings <1>
+PUT data/_settings <1>
{
  "index.mapper.dynamic":false
}
--------------------------------------------------
// AUTOSENSE
+// TEST[continued]
+
<1> Disable automatic type creation for the `data` index.

Regardless of the value of this setting, types can still be added explicitly
@@ -64,4 +66,3 @@ include::dynamic/default-mapping.asciidoc[]
include::dynamic/field-mapping.asciidoc[]

include::dynamic/templates.asciidoc[]
-
diff --git a/docs/reference/mapping/dynamic/templates.asciidoc b/docs/reference/mapping/dynamic/templates.asciidoc
index 315ed05d759..6396abe8522 100644
--- a/docs/reference/mapping/dynamic/templates.asciidoc
+++ b/docs/reference/mapping/dynamic/templates.asciidoc
@@ -347,7 +347,7 @@ PUT my_index
--------------------------------------------------

The sub `keyword` field appears in this template to be consistent with the
-default rules of dynamic mappings. Of course if you do not need them because
+default rules of dynamic mappings. Of course if you do not need them because
you don't need to perform exact search or aggregate on this field, you could
remove it as described in the previous section.

@@ -406,14 +406,12 @@ new indices, you could create the following index template:
--------------------------------------------------
PUT _template/disable_all_field
{
-  "disable_all_field": {
-    "order": 0,
-    "template": "*", <1>
-    "mappings": {
-      "_default_": { <2>
-        "_all": { <3>
-          "enabled": false
-        }
+  "order": 0,
+  "template": "*", <1>
+  "mappings": {
+    "_default_": { <2>
+      "_all": { <3>
+        "enabled": false
      }
    }
  }
diff --git a/docs/reference/mapping/fields/all-field.asciidoc b/docs/reference/mapping/fields/all-field.asciidoc
index 6c5f073aee8..58f0e783af4 100644
--- a/docs/reference/mapping/fields/all-field.asciidoc
+++ b/docs/reference/mapping/fields/all-field.asciidoc
@@ -116,6 +116,7 @@ PUT my_index
}
--------------------------------
// AUTOSENSE
+// TEST[s/\.\.\.//]
<1> The `_all` field in `type_1` is enabled.
<2> The `_all` field in `type_2` is completely disabled.

@@ -143,7 +144,7 @@ PUT my_index
  },
  "settings": {
    "index.query.default_field": "content" <2>
-  },
+  }
}
--------------------------------
// AUTOSENSE

@@ -337,7 +338,7 @@ GET _search
  "highlight": {
    "fields": {
      "*_name": { <2>
-        "require_field_match": "false" <3>
+        "require_field_match": false <3>
      }
    }
  }
@@ -348,5 +349,3 @@ GET _search
<1> The query inspects the `_all` field to find matching documents.
<2> Highlighting is performed on the two name fields, which are available from the `_source`.
<3> The query wasn't run against the name fields, so set `require_field_match` to `false`.
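For comparison, a direct query against `_all` looks like this (a minimal sketch of the default-field behaviour discussed above):

[source,js]
--------------------------------
GET _search
{
  "query": {
    "match": {
      "_all": "john smith new york"
    }
  }
}
--------------------------------
// AUTOSENSE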
-
-
diff --git a/docs/reference/mapping/fields/field-names-field.asciidoc b/docs/reference/mapping/fields/field-names-field.asciidoc
index bafc3e3f7d9..ebb228fca94 100644
--- a/docs/reference/mapping/fields/field-names-field.asciidoc
+++ b/docs/reference/mapping/fields/field-names-field.asciidoc
@@ -6,8 +6,7 @@ contains any value other than `null`. This field is used by the
<> query to find documents that
either have or don't have any non-+null+ value for a particular field.

-The value of the `_field_name` field is accessible in queries, aggregations, and
-scripts:
+The value of the `_field_names` field is accessible in queries and scripts:

[source,js]
--------------------------
@@ -30,17 +29,9 @@ GET my_index/_search
      "_field_names": [ "title" ] <1>
    }
  },
-  "aggs": {
-    "Field names": {
-      "terms": {
-        "field": "_field_names", <2>
-        "size": 10
-      }
-    }
-  },
  "script_fields": {
    "Field names": {
-      "script": "doc['_field_names']" <3>
+      "script": "doc['_field_names']" <2>
    }
  }
}
@@ -49,5 +40,4 @@ GET my_index/_search
// AUTOSENSE

<1> Querying on the `_field_names` field (also see the <> query)
-<2> Aggregating on the `_field_names` field
-<3> Accessing the `_field_names` field in scripts (inline scripts must be <> for this example to work)
+<2> Accessing the `_field_names` field in scripts (inline scripts must be <> for this example to work)
diff --git a/docs/reference/mapping/fields/parent-field.asciidoc b/docs/reference/mapping/fields/parent-field.asciidoc
index fb066580044..c15a7bcd3b9 100644
--- a/docs/reference/mapping/fields/parent-field.asciidoc
+++ b/docs/reference/mapping/fields/parent-field.asciidoc
@@ -59,8 +59,8 @@ See the <> and
the <> aggregation, and <> for more information.

-The value of the `_parent` field is accessible in queries, aggregations, scripts,
-and when sorting:
+The value of the `_parent` field is accessible in queries, aggregations,
+and scripts:

[source,js]
--------------------------
@@ -79,13 +79,6 @@ GET my_index/_search
      }
    }
  },
-  "sort": [
-    {
-      "_parent": { <3>
-        "order": "desc"
-      }
-    }
-  ],
  "script_fields": {
    "parent": {
-      "script": "doc['_parent']" <4>
+      "script": "doc['_parent']" <3>
@@ -94,11 +87,11 @@ GET my_index/_search
  }
--------------------------
// AUTOSENSE
+// TEST[continued]

<1> Querying on the `_parent` field (also see the <> and the <>)
<2> Aggregating on the `_parent` field (also see the <> aggregation)
-<3> Sorting on the `_parent` field
-<4> Accessing the `_parent` field in scripts (inline scripts must be <> for this example to work)
+<3> Accessing the `_parent` field in scripts (inline scripts must be <> for this example to work)

==== Parent-child restrictions
@@ -158,4 +151,3 @@ GET _stats/fielddata?human&fields=_parent
GET _nodes/stats/indices/fielddata?human&fields=_parent
--------------------------------------------------
// AUTOSENSE
-
diff --git a/docs/reference/mapping/fields/routing-field.asciidoc b/docs/reference/mapping/fields/routing-field.asciidoc
index deb46178ccf..496d8dcf56e 100644
--- a/docs/reference/mapping/fields/routing-field.asciidoc
+++ b/docs/reference/mapping/fields/routing-field.asciidoc
@@ -22,14 +22,14 @@ PUT my_index/my_type/1?routing=user1 <1>
GET my_index/my_type/1?routing=user1 <2>
------------------------------
// AUTOSENSE
+// TESTSETUP

<1> This document uses `user1` as its routing value, instead of its ID.
<2> The same `routing` value needs to be provided when <>, <>, or <> the document.
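As a hedged aside, omitting the routing value on a lookup routes the request by ID alone, so it will usually miss the document (which shard actually holds it depends on the cluster):

[source,js]
------------------------------
GET my_index/my_type/1 <1>
------------------------------
// AUTOSENSE
<1> Without `routing=user1` this request is routed by ID alone and will typically return `"found": false`.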
-The value of the `_routing` field is accessible in queries, aggregations, scripts,
-and when sorting:
+The value of the `_routing` field is accessible in queries and scripts:

[source,js]
--------------------------
@@ -40,21 +40,6 @@ GET my_index/_search
      "_routing": [ "user1" ] <1>
    }
  },
-  "aggs": {
-    "Routing values": {
-      "terms": {
-        "field": "_routing", <2>
-        "size": 10
-      }
-    }
-  },
-  "sort": [
-    {
-      "_routing": { <3>
-        "order": "desc"
-      }
-    }
-  ],
  "script_fields": {
    "Routing value": {
-      "script": "doc['_routing']" <4>
+      "script": "doc['_routing']" <2>
@@ -65,9 +50,7 @@ GET my_index/_search
// AUTOSENSE

<1> Querying on the `_routing` field (also see the <>)
-<2> Aggregating on the `_routing` field
-<3> Sorting on the `_routing` field
-<4> Accessing the `_routing` field in scripts (inline scripts must be <> for this example to work)
+<2> Accessing the `_routing` field in scripts (inline scripts must be <> for this example to work)

==== Searching with custom routing
@@ -104,7 +87,7 @@ custom `routing` value required for all CRUD operations:

[source,js]
------------------------------
-PUT my_index
+PUT my_index2
{
  "mappings": {
    "my_type": {
@@ -115,12 +98,13 @@ PUT my_index
  }
}

-PUT my_index/my_type/1 <2>
+PUT my_index2/my_type/1 <2>
{
  "text": "No routing value provided"
}
------------------------------
// AUTOSENSE
+// TEST[catch:request]

<1> Routing is required for `my_type` documents.
<2> This index request throws a `routing_missing_exception`.
diff --git a/docs/reference/mapping/fields/timestamp-field.asciidoc b/docs/reference/mapping/fields/timestamp-field.asciidoc
index 3f4bf8a8134..c6b3887f889 100644
--- a/docs/reference/mapping/fields/timestamp-field.asciidoc
+++ b/docs/reference/mapping/fields/timestamp-field.asciidoc
@@ -89,6 +89,7 @@ GET my_index/_search
}
--------------------------
// AUTOSENSE
+// TEST[continued]

<1> Querying on the `_timestamp` field
<2> Aggregating on the `_timestamp` field
diff --git a/docs/reference/mapping/fields/type-field.asciidoc b/docs/reference/mapping/fields/type-field.asciidoc
index c8f3817bcbf..8a569d22f53 100644
--- a/docs/reference/mapping/fields/type-field.asciidoc
+++ b/docs/reference/mapping/fields/type-field.asciidoc
@@ -1,12 +1,11 @@
[[mapping-type-field]]
=== `_type` field

Each document indexed is associated with a <> (see
<>) and an <>. The `_type` field is
indexed in order to make searching by type name fast.
-The value of the `_type` field is accessible in queries, aggregations, -scripts, and when sorting: +The value of the `_type` field is accessible in queries and scripts: [source,js] -------------------------- @@ -21,31 +20,16 @@ PUT my_index/type_2/2 "text": "Document with type 2" } -GET my_index/_search/type_* +GET my_index/type_*/_search { "query": { "terms": { "_type": [ "type_1", "type_2" ] <1> } }, - "aggs": { - "types": { - "terms": { - "field": "_type", <2> - "size": 10 - } - } - }, - "sort": [ - { - "_type": { <3> - "order": "desc" - } - } - ], "script_fields": { "type": { - "script": "doc['_type']" <4> + "script": "doc['_type']" <2> } } } @@ -54,7 +38,4 @@ GET my_index/_search/type_* // AUTOSENSE <1> Querying on the `_type` field -<2> Aggregating on the `_type` field -<3> Sorting on the `_type` field -<4> Accessing the `_type` field in scripts (inline scripts must be <> for this example to work) - +<2> Accessing the `_type` field in scripts (inline scripts must be <> for this example to work) diff --git a/docs/reference/mapping/params.asciidoc b/docs/reference/mapping/params.asciidoc index 4071c383ba1..d0591a7479f 100644 --- a/docs/reference/mapping/params.asciidoc +++ b/docs/reference/mapping/params.asciidoc @@ -91,10 +91,3 @@ include::params/similarity.asciidoc[] include::params/store.asciidoc[] include::params/term-vector.asciidoc[] - - -[source,js] --------------------------------------------------- --------------------------------------------------- -// AUTOSENSE - diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc index 2a452465e2d..5f564f9a668 100644 --- a/docs/reference/mapping/params/analyzer.asciidoc +++ b/docs/reference/mapping/params/analyzer.asciidoc @@ -80,7 +80,7 @@ GET my_index/_analyze?field=text.english <4> [[search-quote-analyzer]] ==== `search_quote_analyzer` -The `search_quote_analyzer` setting allows you to specify an analyzer for phrases, this is particularly useful when dealing with disabling +The `search_quote_analyzer` setting allows you to specify an analyzer for phrases, this is particularly useful when dealing with disabling stop words for phrase queries. To disable stop words for phrases a field utilising three analyzer settings will be required: @@ -91,7 +91,7 @@ To disable stop words for phrases a field utilising three analyzer settings will [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "settings":{ "analysis":{ @@ -128,7 +128,6 @@ PUT /my_index "analyzer":"my_analyzer", <3> "search_analyzer":"my_stop_analyzer", <4> "search_quote_analyzer":"my_analyzer" <5> - } } } } @@ -162,10 +161,10 @@ GET my_index/my_type/_search <2> `my_stop_analyzer` analyzer which removes stop words <3> `analyzer` setting that points to the `my_analyzer` analyzer which will be used at index time <4> `search_analyzer` setting that points to the `my_stop_analyzer` and removes stop words for non-phrase queries -<5> `search_quote_analyzer` setting that points to the `my_analyzer` analyzer and ensures that stop words are not removed from phrase queries +<5> `search_quote_analyzer` setting that points to the `my_analyzer` analyzer and ensures that stop words are not removed from phrase queries <6> Since the query is wrapped in quotes it is detected as a phrase query therefore the `search_quote_analyzer` kicks in and ensures the stop words -are not removed from the query. 
The `my_analyzer` analyzer will then return the following tokens [`the`, `quick`, `brown`, `fox`] which will match one -of the documents. Meanwhile term queries will be analyzed with the `my_stop_analyzer` analyzer which will filter out stop words. So a search for either -`The quick brown fox` or `A quick brown fox` will return both documents since both documents contain the following tokens [`quick`, `brown`, `fox`]. -Without the `search_quote_analyzer` it would not be possible to do exact matches for phrase queries as the stop words from phrase queries would be +are not removed from the query. The `my_analyzer` analyzer will then return the following tokens [`the`, `quick`, `brown`, `fox`] which will match one +of the documents. Meanwhile term queries will be analyzed with the `my_stop_analyzer` analyzer which will filter out stop words. So a search for either +`The quick brown fox` or `A quick brown fox` will return both documents since both documents contain the following tokens [`quick`, `brown`, `fox`]. +Without the `search_quote_analyzer` it would not be possible to do exact matches for phrase queries as the stop words from phrase queries would be removed resulting in both documents matching. diff --git a/docs/reference/mapping/params/boost.asciidoc b/docs/reference/mapping/params/boost.asciidoc index add6f806844..a3379d581c7 100644 --- a/docs/reference/mapping/params/boost.asciidoc +++ b/docs/reference/mapping/params/boost.asciidoc @@ -34,24 +34,31 @@ You can achieve the same effect by using the boost parameter directly in the que [source,js] -------------------------------------------------- +POST _search { - "match" : { - "title": { - "query": "quick brown fox" + "query": { + "match" : { + "title": { + "query": "quick brown fox" + } } } } -------------------------------------------------- +// AUTOSENSE is equivalent to: [source,js] -------------------------------------------------- +POST _search { - "match" : { - "title": { - "query": "quick brown fox", - "boost": 2 + "query": { + "match" : { + "title": { + "query": "quick brown fox", + "boost": 2 + } } } } diff --git a/docs/reference/mapping/params/coerce.asciidoc b/docs/reference/mapping/params/coerce.asciidoc index 0121c307230..9d55a8b80c8 100644 --- a/docs/reference/mapping/params/coerce.asciidoc +++ b/docs/reference/mapping/params/coerce.asciidoc @@ -45,6 +45,7 @@ PUT my_index/my_type/2 } -------------------------------------------------- // AUTOSENSE +// TEST[catch:request] <1> The `number_one` field will contain the integer `10`. <2> This document will be rejected because coercion is disabled. @@ -69,11 +70,11 @@ PUT my_index "my_type": { "properties": { "number_one": { - "type": "integer" - }, - "number_two": { "type": "integer", "coerce": true + }, + "number_two": { + "type": "integer" } } } @@ -87,6 +88,6 @@ PUT my_index/my_type/2 { "number_two": "10" } <2> -------------------------------------------------- // AUTOSENSE -<1> This document will be rejected because the `number_one` field inherits the index-level coercion setting. -<2> The `number_two` field overrides the index level setting to enable coercion. - +// TEST[catch:request] +<1> The `number_one` field overrides the index level setting to enable coercion. +<2> This document will be rejected because the `number_two` field inherits the index-level coercion setting. 
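One detail worth noting here: coercion changes only the indexed value, never the stored JSON, so fetching the first document back still shows the original string (a minimal sketch):

[source,js]
--------------------------------------------------
GET my_index/my_type/1 <1>
--------------------------------------------------
// AUTOSENSE
<1> The `_source` still contains `"number_one": "10"` even though the indexed value is the integer `10`.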
diff --git a/docs/reference/mapping/params/copy-to.asciidoc b/docs/reference/mapping/params/copy-to.asciidoc index 863bf1996cd..a6288bcbc48 100644 --- a/docs/reference/mapping/params/copy-to.asciidoc +++ b/docs/reference/mapping/params/copy-to.asciidoc @@ -9,7 +9,7 @@ the `full_name` field as follows: [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "mappings": { "my_type": { @@ -30,13 +30,13 @@ PUT /my_index } } -PUT /my_index/my_type/1 +PUT my_index/my_type/1 { "first_name": "John", "last_name": "Smith" } -GET /my_index/_search +GET my_index/_search { "query": { "match": { diff --git a/docs/reference/mapping/params/dynamic.asciidoc b/docs/reference/mapping/params/dynamic.asciidoc index 72bbd369d7f..16bb89fbf6b 100644 --- a/docs/reference/mapping/params/dynamic.asciidoc +++ b/docs/reference/mapping/params/dynamic.asciidoc @@ -7,9 +7,7 @@ containing the new field. For instance: [source,js] -------------------------------------------------- -DELETE my_index <1> - -PUT my_index/my_type/1 <2> +PUT my_index/my_type/1 <1> { "username": "johnsmith", "name": { @@ -18,9 +16,9 @@ PUT my_index/my_type/1 <2> } } -GET my_index/_mapping <3> +GET my_index/_mapping <2> -PUT my_index/my_type/2 <4> +PUT my_index/my_type/2 <3> { "username": "marywhite", "email": "mary@white.com", @@ -31,16 +29,15 @@ PUT my_index/my_type/2 <4> } } -GET my_index/_mapping <5> +GET my_index/_mapping <4> -------------------------------------------------- // AUTOSENSE -<1> First delete the index, in case it already exists. -<2> This document introduces the string field `username`, the object field +<1> This document introduces the string field `username`, the object field `name`, and two string fields under the `name` object which can be referred to as `name.first` and `name.last`. -<3> Check the mapping to verify the above. -<4> This document adds two string fields: `email` and `name.middle`. -<5> Check the mapping to verify the changes. +<2> Check the mapping to verify the above. +<3> This document adds two string fields: `email` and `name.middle`. +<4> Check the mapping to verify the changes. The details of how new fields are detected and added to the mapping is explained in <>. @@ -88,5 +85,3 @@ PUT my_index TIP: The `dynamic` setting is allowed to have different settings for fields of the same name in the same index. Its value can be updated on existing fields using the <>. 
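Since the TIP notes that `dynamic` can be updated on a live index, a minimal sketch of doing so through the put-mapping API (index and type names as in the example above):

[source,js]
--------------------------------------------------
PUT my_index/_mapping/my_type
{
  "dynamic": false <1>
}
--------------------------------------------------
// AUTOSENSE
<1> From here on, unmapped fields in `my_type` documents are kept in `_source` but no longer added to the mapping or indexed.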
- - diff --git a/docs/reference/mapping/params/fielddata.asciidoc b/docs/reference/mapping/params/fielddata.asciidoc index e67b47a831f..e20a82bfd5c 100644 --- a/docs/reference/mapping/params/fielddata.asciidoc +++ b/docs/reference/mapping/params/fielddata.asciidoc @@ -100,14 +100,11 @@ PUT my_index "properties": { "tag": { "type": "text", - "fielddata": { - "filter": { - "frequency": { - "min": 0.001, - "max": 0.1, - "min_segment_size": 500 - } - } + "fielddata": true, + "fielddata_frequency_filter": { + "min": 0.001, + "max": 0.1, + "min_segment_size": 500 } } } diff --git a/docs/reference/mapping/params/ignore-malformed.asciidoc b/docs/reference/mapping/params/ignore-malformed.asciidoc index b5e5ee99b2c..00b57cb32e9 100644 --- a/docs/reference/mapping/params/ignore-malformed.asciidoc +++ b/docs/reference/mapping/params/ignore-malformed.asciidoc @@ -20,11 +20,11 @@ PUT my_index "my_type": { "properties": { "number_one": { - "type": "integer" - }, - "number_two": { "type": "integer", "ignore_malformed": true + }, + "number_two": { + "type": "integer" } } } @@ -44,8 +44,9 @@ PUT my_index/my_type/2 } -------------------------------------------------- // AUTOSENSE -<1> This document will be rejected because `number_one` does not allow malformed values. -<2> This document will have the `text` field indexed, but not the `number_two` field. +// TEST[catch:request] +<1> This document will have the `text` field indexed, but not the `number_one` field. +<2> This document will be rejected because `number_two` does not allow malformed values. TIP: The `ignore_malformed` setting is allowed to have different settings for fields of the same name in the same index. Its value can be updated on @@ -84,4 +85,3 @@ PUT my_index <1> The `number_one` field inherits the index-level setting. <2> The `number_two` field overrides the index-level setting to turn off `ignore_malformed`. - diff --git a/docs/reference/mapping/params/include-in-all.asciidoc b/docs/reference/mapping/params/include-in-all.asciidoc index 18c454ae70c..bd2b5b8a179 100644 --- a/docs/reference/mapping/params/include-in-all.asciidoc +++ b/docs/reference/mapping/params/include-in-all.asciidoc @@ -15,7 +15,7 @@ PUT my_index "properties": { "title": { <1> "type": "text" - } + }, "content": { <1> "type": "text" }, diff --git a/docs/reference/mapping/params/multi-fields.asciidoc b/docs/reference/mapping/params/multi-fields.asciidoc index 8ca2809c7f2..bee9dae1763 100644 --- a/docs/reference/mapping/params/multi-fields.asciidoc +++ b/docs/reference/mapping/params/multi-fields.asciidoc @@ -8,7 +8,7 @@ search, and as a `keyword` field for sorting or aggregations: [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "mappings": { "my_type": { @@ -26,17 +26,17 @@ PUT /my_index } } -PUT /my_index/my_type/1 +PUT my_index/my_type/1 { "city": "New York" } -PUT /my_index/my_type/2 +PUT my_index/my_type/2 { "city": "York" } -GET /my_index/_search +GET my_index/_search { "query": { "match": { @@ -132,4 +132,3 @@ stemmed field allows a query for `foxes` to also match the document containing just `fox`. This allows us to match as many documents as possible. By also querying the unstemmed `text` field, we improve the relevance score of the document which matches `foxes` exactly. 
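The two-field query that paragraph describes can be written with `multi_match` (a sketch consistent with the multi-field mapping above):

[source,js]
--------------------------------------------------
GET my_index/_search
{
  "query": {
    "multi_match": {
      "query": "foxes",
      "fields": [ "text", "text.english" ] <1>
    }
  }
}
--------------------------------------------------
// AUTOSENSE
<1> The stemmed `text.english` sub-field matches documents containing either `fox` or `foxes`, while the unstemmed `text` field lifts the score of exact `foxes` matches.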
- diff --git a/docs/reference/mapping/params/norms.asciidoc b/docs/reference/mapping/params/norms.asciidoc index f6e42219a1f..9f78e0d62d8 100644 --- a/docs/reference/mapping/params/norms.asciidoc +++ b/docs/reference/mapping/params/norms.asciidoc @@ -31,11 +31,10 @@ PUT my_index/_mapping/my_type } ------------ // AUTOSENSE +// TEST[s/^/PUT my_index\n/] NOTE: Norms will not be removed instantly, but will be removed as old segments are merged into new segments as you continue indexing new documents. Any score computation on a field that has had norms removed might return inconsistent results since some documents won't have norms anymore while other documents might still have norms. - - diff --git a/docs/reference/mapping/params/position-increment-gap.asciidoc b/docs/reference/mapping/params/position-increment-gap.asciidoc index d2cf1360080..871b6275a1c 100644 --- a/docs/reference/mapping/params/position-increment-gap.asciidoc +++ b/docs/reference/mapping/params/position-increment-gap.asciidoc @@ -13,12 +13,12 @@ For example: [source,js] -------------------------------------------------- -PUT /my_index/groups/1 +PUT my_index/groups/1 { "names": [ "John Abraham", "Lincoln Smith"] } -GET /my_index/groups/_search +GET my_index/groups/_search { "query": { "match_phrase": { @@ -29,7 +29,7 @@ GET /my_index/groups/_search } } -GET /my_index/groups/_search +GET my_index/groups/_search { "query": { "match_phrase": { @@ -65,12 +65,12 @@ PUT my_index } } -PUT /my_index/groups/1 +PUT my_index/groups/1 { "names": [ "John Abraham", "Lincoln Smith"] } -GET /my_index/groups/_search +GET my_index/groups/_search { "query": { "match_phrase": { diff --git a/docs/reference/mapping/params/properties.asciidoc b/docs/reference/mapping/params/properties.asciidoc index a4f5277649a..d9e3a2db75d 100644 --- a/docs/reference/mapping/params/properties.asciidoc +++ b/docs/reference/mapping/params/properties.asciidoc @@ -99,7 +99,6 @@ GET my_index/_search } -------------------------------------------------- // AUTOSENSE +// TEST[continued] IMPORTANT: The full path to the inner field must be specified. - - diff --git a/docs/reference/mapping/params/search-analyzer.asciidoc b/docs/reference/mapping/params/search-analyzer.asciidoc index 5a732ee3aac..3e51aad14af 100644 --- a/docs/reference/mapping/params/search-analyzer.asciidoc +++ b/docs/reference/mapping/params/search-analyzer.asciidoc @@ -14,7 +14,7 @@ this can be overridden with the `search_analyzer` setting: [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "settings": { "analysis": { @@ -81,5 +81,3 @@ type] for a full explanation of this example. TIP: The `search_analyzer` setting must have the same setting for fields of the same name in the same index. Its value can be updated on existing fields using the <>. - - diff --git a/docs/reference/mapping/params/store.asciidoc b/docs/reference/mapping/params/store.asciidoc index 9f10b25724c..9c30217c146 100644 --- a/docs/reference/mapping/params/store.asciidoc +++ b/docs/reference/mapping/params/store.asciidoc @@ -18,7 +18,7 @@ to extract those fields from a large `_source` field: [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "mappings": { "my_type": { @@ -39,7 +39,7 @@ PUT /my_index } } -PUT /my_index/my_type/1 +PUT my_index/my_type/1 { "title": "Some short title", "date": "2015-01-01", @@ -70,4 +70,3 @@ field instead. 
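Tying this back to the example above, a search that loads only the stored fields might look like this (a sketch using the `fields` search option of this era; the large body field is never fetched):

[source,js]
--------------------------------------------------
GET my_index/_search
{
  "fields": [ "title", "date" ], <1>
  "query": {
    "match": {
      "title": "short title"
    }
  }
}
--------------------------------------------------
// AUTOSENSE
<1> Only the stored `title` and `date` values are returned.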
Another situation where it can make sense to make a field stored is for those that don't appear in the `_source` field (such as <>). - diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index 85e86e6a6a1..3756717b0d1 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -40,7 +40,7 @@ PUT my_index/my_type/1 { "date": "2015-01-01" } <2> PUT my_index/my_type/2 -{ "date": "2015-01-01T12:10:30Z" } <3> +{ "date": "2015-01-01T12:10:30Z" } <3> PUT my_index/my_type/3 { "date": 1420070400001 } <4> @@ -134,4 +134,3 @@ The following parameters are accepted by `date` fields: Whether the field value should be stored and retrievable separately from the <> field. Accepts `true` or `false` (default). - diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index ed0bb47e9d4..90b54fafb04 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -62,6 +62,7 @@ GET my_index/_search } -------------------------------------------------- // AUTOSENSE +// TEST[continued] ==== Using `nested` fields for arrays of objects @@ -137,7 +138,7 @@ GET my_index/_search "user.first": {} } } - + } } } } diff --git a/docs/reference/migration/migrate_5_0/mapping.asciidoc b/docs/reference/migration/migrate_5_0/mapping.asciidoc index 4d1e2653356..2da16a62309 100644 --- a/docs/reference/migration/migrate_5_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_5_0/mapping.asciidoc @@ -53,7 +53,7 @@ you could map it both as a number and a `keyword` using <>: [source,js] -------------------------------------------------- -PUT /my_index +PUT my_index { "mappings": { "my_type": { @@ -149,4 +149,3 @@ Per-field boosts on the `_all` are now compressed into a single byte instead of the 4 bytes used previously. While this will make the index much more space-efficient, it also means that index time boosts will be less accurately encoded. - diff --git a/docs/reference/modules/cluster/allocation_filtering.asciidoc b/docs/reference/modules/cluster/allocation_filtering.asciidoc index 051c88ac9cd..fbdeb64909f 100644 --- a/docs/reference/modules/cluster/allocation_filtering.asciidoc +++ b/docs/reference/modules/cluster/allocation_filtering.asciidoc @@ -14,7 +14,7 @@ For instance, we could decommission a node using its IP address as follows: [source,js] -------------------------------------------------- -PUT /_cluster/settings +PUT _cluster/settings { "transient" : { "cluster.routing.allocation.exclude._ip" : "10.0.0.1" @@ -67,4 +67,4 @@ PUT _cluster/settings } ------------------------ // AUTOSENSE - +// TEST[skip:indexes don't assign] diff --git a/docs/reference/modules/cluster/disk_allocator.asciidoc b/docs/reference/modules/cluster/disk_allocator.asciidoc index 9baf8a379fb..fdd8fc5036b 100644 --- a/docs/reference/modules/cluster/disk_allocator.asciidoc +++ b/docs/reference/modules/cluster/disk_allocator.asciidoc @@ -56,7 +56,7 @@ the cluster every minute: [source,js] -------------------------------------------------- -PUT /_cluster/settings +PUT _cluster/settings { "transient": { "cluster.routing.allocation.disk.watermark.low": "80%", @@ -73,4 +73,3 @@ data paths, one with 50b out of 100b free (50% used) and another with 40b out of 50b free (80% used) it would see the node's disk usage as 90b out of 150b). In 2.0.0, the minimum and maximum disk usages are tracked separately. 
- diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 5d7e5a10482..2cb59c6de3a 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -158,6 +158,7 @@ PUT _cluster/settings } ---------------------------- // AUTOSENSE +// TEST[catch:/cannot set discovery.zen.minimum_master_nodes to more than the current master nodes/] TIP: An advantage of splitting the master and data roles between dedicated nodes is that you can have just three master-eligible nodes and set @@ -299,4 +300,3 @@ same data directory. This can lead to unexpected data loss. More node settings can be found in <>. Of particular note are the <>, the <> and the <>. - diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index b96e1f099de..98cb052d7a3 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -40,11 +40,9 @@ to `painless`. To illustrate how Painless works, let's load some hockey stats into an Elasticsearch index: -[source,sh] +[source,js] ---------------------------------------------------------------- -DELETE /hockey-stats - -PUT /hockey-stats/player/_bulk +PUT hockey/player/_bulk?refresh {"index":{"_id":1}} {"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} {"index":{"_id":2}} @@ -69,6 +67,7 @@ PUT /hockey-stats/player/_bulk {"first":"joe","last":"colborne","goals":[3,18,13],"assists":[6,20,24],"gp":[26,67,82]} ---------------------------------------------------------------- // AUTOSENSE +// TESTSETUP [float] === Accessing Doc Values from Painless @@ -77,9 +76,9 @@ All Painless scripts take in a `Map` of values called `input`. Docu For example, the following script calculates a player's total goals. This example uses a strongly typed `int` and a `for` loop. -[source,sh] +[source,js] ---------------------------------------------------------------- -GET /hockey-stats/_search +GET hockey/_search { "query": { "function_score": { @@ -97,9 +96,9 @@ GET /hockey-stats/_search Alternatively, you could do the same thing using a script field instead of a function score: -[source,sh] +[source,js] ---------------------------------------------------------------- -GET /hockey-stats/_search +GET hockey/_search { "query": { "match_all": {} @@ -120,16 +119,16 @@ You must always specify the index of the field value you want, even if there's o All fields in Elasticsearch are multi-valued and Painless does not provide a `.value` shortcut. The following example uses a Painless script to sort the players by their combined first and last names. The names are accessed using `input.doc['first'].0` and `input.doc['last'].0`. -[source,sh] +[source,js] ---------------------------------------------------------------- -GET /hockey-stats/_search +GET hockey/_search { "query": { "match_all": {} }, "sort": { "_script": { - "type": "keyword", + "type": "string", "order": "asc", "script": { "lang": "painless", @@ -148,9 +147,9 @@ You can also easily update fields. 
You access the original source for a field as First, let's look at the source data for a player by submitting the following request: -[source,sh] +[source,js] ---------------------------------------------------------------- -GET /hockey-stats/_search +GET hockey/_search { "fields": [ "_id", @@ -167,9 +166,9 @@ GET /hockey-stats/_search To change player 1's last name to `hockey`, simply set `input.ctx._source.last` to the new value: -[source,sh] +[source,js] ---------------------------------------------------------------- -POST /hockey-stats/player/1/_update +POST hockey/player/1/_update { "script": { "lang": "painless", @@ -185,9 +184,9 @@ POST /hockey-stats/player/1/_update You can also add fields to a document. For example, this script adds a new field that contains the player's nickname, _hockey_. -[source,sh] +[source,js] ---------------------------------------------------------------- -POST /hockey-stats/player/1/_update +POST hockey/player/1/_update { "script": { "lang": "painless", @@ -208,9 +207,9 @@ If you explicitly specify types, the compiler doesn't have to perform type looku improve performance. For example, the following script performs the same first name, last name sort we showed before, but it's fully type-safe. -[source,sh] +[source,js] ---------------------------------------------------------------- -GET /hockey-stats/_search +GET hockey/_search { "query": { "match_all": {} diff --git a/docs/reference/modules/scripting/using.asciidoc b/docs/reference/modules/scripting/using.asciidoc index 71eba9386e3..81636f6189e 100644 --- a/docs/reference/modules/scripting/using.asciidoc +++ b/docs/reference/modules/scripting/using.asciidoc @@ -186,35 +186,27 @@ state: [source,js] ----------------------------------- -POST /_scripts/groovy/calculate-score +POST _scripts/groovy/calculate-score { "script": "log(_score * 2) + my_modifier" } ----------------------------------- // AUTOSENSE - This same script can be retrieved with: [source,js] ----------------------------------- -GET /_scripts/groovy/calculate-score ------------------------------------ -// AUTOSENSE - -or deleted with: - -[source,js] ------------------------------------ -DELETE /_scripts/groovy/calculate-score +GET _scripts/groovy/calculate-score ----------------------------------- // AUTOSENSE +// TEST[continued] Stored scripts can be used by specifying the `lang` and `id` parameters as follows: [source,js] -------------------------------------------------- -GET my_index/_search +GET _search { "query": { "script": { @@ -229,10 +221,19 @@ GET my_index/_search } } -------------------------------------------------- +// AUTOSENSE +// TEST[continued] +And deleted with: + +[source,js] +----------------------------------- +DELETE _scripts/groovy/calculate-score +----------------------------------- +// AUTOSENSE +// TEST[continued] NOTE: The size of stored scripts is limited to 65,535 bytes. This can be changed by setting `script.max_size_in_bytes` setting to increase that soft limit, but if scripts are really large then alternatives like <> scripts should be considered instead. - diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index d3895826c68..ab39eb49611 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -39,47 +39,47 @@ on the fraction of all query terms that a document contains. See Lucene `BooleanQuery` for more details. 
The `bool` query takes a _more-matches-is-better_ approach, so the score from -each matching `must` or `should` clause will be added together to provide the +each matching `must` or `should` clause will be added together to provide the final `_score` for each document. [source,js] -------------------------------------------------- +POST _search { + "query": { "bool" : { - "must" : { - "term" : { "user" : "kimchy" } - }, - "filter": { - "term" : { "tag" : "tech" } - }, - "must_not" : { - "range" : { - "age" : { "from" : 10, "to" : 20 } - } - }, - "should" : [ - { - "term" : { "tag" : "wow" } - }, - { - "term" : { "tag" : "elasticsearch" } - } - ], - "minimum_should_match" : 1, - "boost" : 1.0 + "must" : { + "term" : { "user" : "kimchy" } + }, + "filter": { + "term" : { "tag" : "tech" } + }, + "must_not" : { + "range" : { + "age" : { "from" : 10, "to" : 20 } + } + }, + "should" : [ + { "term" : { "tag" : "wow" } }, + { "term" : { "tag" : "elasticsearch" } } + ], + "minimum_should_match" : 1, + "boost" : 1.0 } + } } -------------------------------------------------- +// AUTOSENSE -==== Scoring with `bool.filter` +==== Scoring with `bool.filter` Queries specified under the `filter` element have no effect on scoring -- scores are returned as `0`. Scores are only affected by the query that has -been specified. For instance, all three of the following queries return -all documents where the `status` field contains the term `active`. +been specified. For instance, all three of the following queries return +all documents where the `status` field contains the term `active`. This first query assigns a score of `0` to all documents, as no scoring -query has been specified: +query has been specified: [source,json] --------------------------------- @@ -98,7 +98,7 @@ GET _search --------------------------------- // AUTOSENSE -This `bool` query has a `match_all` query, which assigns a score of `1.0` to +This `bool` query has a `match_all` query, which assigns a score of `1.0` to all documents. [source,json] @@ -107,7 +107,7 @@ GET _search { "query": { "bool": { - "query": { + "must": { "match_all": {} }, "filter": { @@ -121,9 +121,9 @@ GET _search --------------------------------- // AUTOSENSE -This `constant_score` query behaves in exactly the same way as the second example above. +This `constant_score` query behaves in exactly the same way as the second example above. The `constant_score` query assigns a score of `1.0` to all documents matched -by the filter. +by the filter. [source,json] --------------------------------- diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 402eeb44550..3f9943d90d0 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -459,7 +459,7 @@ the request would look like this: [source,js] -------------------------------------------------- -GET /hotels/_search/ +GET _search { "query": { "function_score": { @@ -546,4 +546,3 @@ Only numeric, date, and geo-point fields are supported. If the numeric field is missing in the document, the function will return 1. - diff --git a/docs/reference/query-dsl/term-query.asciidoc b/docs/reference/query-dsl/term-query.asciidoc index 801abf65af8..b27885ac1ed 100644 --- a/docs/reference/query-dsl/term-query.asciidoc +++ b/docs/reference/query-dsl/term-query.asciidoc @@ -6,10 +6,14 @@ in the inverted index. 
For instance: [source,js] -------------------------------------------------- +POST _search { + "query": { "term" : { "user" : "Kimchy" } <1> + } } -------------------------------------------------- +// AUTOSENSE <1> Finds documents which contain the exact term `Kimchy` in the inverted index of the `user` field. @@ -18,7 +22,7 @@ relevance score than another query, for instance: [source,js] -------------------------------------------------- -GET /_search +GET _search { "query": { "bool": { @@ -41,6 +45,7 @@ GET /_search } } -------------------------------------------------- +// AUTOSENSE <1> The `urgent` query clause has a boost of `2.0`, meaning it is twice as important as the query clause for `normal`. @@ -113,7 +118,6 @@ Now, compare the results for the `term` query and the `match` query: [source,js] -------------------------------------------------- - GET my_index/my_type/_search { "query": { @@ -151,6 +155,7 @@ GET my_index/my_type/_search } -------------------------------------------------- // AUTOSENSE +// TEST[continued] <1> This query matches because the `exact_value` field contains the exact term `Quick Foxes!`. @@ -161,5 +166,3 @@ GET my_index/my_type/_search <4> This `match` query on the `full_text` field first analyzes the query string, then looks for documents containing `quick` or `foxes` or both. ************************************************** - - diff --git a/docs/reference/setup/cluster_restart.asciidoc b/docs/reference/setup/cluster_restart.asciidoc index 5d93633972b..1dbb6e7616d 100644 --- a/docs/reference/setup/cluster_restart.asciidoc +++ b/docs/reference/setup/cluster_restart.asciidoc @@ -17,7 +17,7 @@ before shutting down a node: [source,js] -------------------------------------------------- -PUT /_cluster/settings +PUT _cluster/settings { "persistent": { "cluster.routing.allocation.enable": "none" @@ -25,6 +25,7 @@ PUT /_cluster/settings } -------------------------------------------------- // AUTOSENSE +// TEST[skip:indexes don't assign] ==== Step 2: Perform a synced flush @@ -33,7 +34,7 @@ Shard recovery will be much faster if you stop indexing and issue a [source,sh] -------------------------------------------------- -POST /_flush/synced +POST _flush/synced -------------------------------------------------- // AUTOSENSE @@ -96,7 +97,7 @@ reenable shard allocation: [source,js] ------------------------------------------------------ -PUT /_cluster/settings +PUT _cluster/settings { "persistent": { "cluster.routing.allocation.enable": "all" @@ -123,4 +124,3 @@ GET _cat/recovery Once the `status` column in the `_cat/health` output has reached `green`, all primary and replica shards have been successfully allocated. 
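(Another illustrative aside: the restart choreography above, disable allocation, synced flush, restart, re-enable allocation, can be scripted against the Java client as well. A minimal sketch under stated assumptions: `RestartHelper` is a hypothetical name, the client is assumed to be connected, and `prepareSyncedFlush` is assumed to exist on the indices admin client in this version.)

[source,java]
--------------------------------------------------
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;

public final class RestartHelper {
    private RestartHelper() {}

    /** Step 1: stop the cluster reacting to nodes leaving by disabling allocation. */
    public static void disableAllocation(Client client) {
        setAllocation(client, "none");
    }

    /** Step 2: synced flush, so shard recovery after the restart is fast. */
    public static void syncedFlush(Client client) {
        client.admin().indices().prepareSyncedFlush().get();
    }

    /** Final step: re-enable allocation once the restarted nodes have rejoined. */
    public static void enableAllocation(Client client) {
        setAllocation(client, "all");
    }

    private static void setAllocation(Client client, String value) {
        client.admin().cluster().prepareUpdateSettings()
            .setPersistentSettings(Settings.builder()
                .put("cluster.routing.allocation.enable", value)
                .build())
            .get();
    }
}
--------------------------------------------------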
- diff --git a/docs/reference/setup/rolling_upgrade.asciidoc b/docs/reference/setup/rolling_upgrade.asciidoc index 07c3060adf6..8c098a1ac04 100644 --- a/docs/reference/setup/rolling_upgrade.asciidoc +++ b/docs/reference/setup/rolling_upgrade.asciidoc @@ -21,7 +21,7 @@ allocation before shutting down a node: [source,js] -------------------------------------------------- -PUT /_cluster/settings +PUT _cluster/settings { "transient": { "cluster.routing.allocation.enable": "none" @@ -29,6 +29,7 @@ PUT /_cluster/settings } -------------------------------------------------- // AUTOSENSE +// TEST[skip:indexes don't assign] ==== Step 2: Stop non-essential indexing and perform a synced flush (Optional) @@ -38,7 +39,7 @@ will be much faster if you temporarily stop non-essential indexing and issue a [source,js] -------------------------------------------------- -POST /_flush/synced +POST _flush/synced -------------------------------------------------- // AUTOSENSE @@ -111,7 +112,7 @@ the node: [source,js] -------------------------------------------------- -PUT /_cluster/settings +PUT _cluster/settings { "transient": { "cluster.routing.allocation.enable": "all" @@ -172,4 +173,3 @@ recovery has completed. When the cluster is stable and the node has recovered, repeat the above steps for all remaining nodes. - diff --git a/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java b/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java new file mode 100644 index 00000000000..452bea4a647 --- /dev/null +++ b/docs/src/test/java/org/elasticsearch/smoketest/SmokeTestDocsIT.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.smoketest; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; +import java.util.List; + +public class SmokeTestDocsIT extends ESRestTestCase { + + public SmokeTestDocsIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } + + @Override + protected void afterIfFailed(List errors) { + super.afterIfFailed(errors); + String name = getTestName().split("=")[1]; + name = name.substring(0, name.length() - 1); + name = name.replaceAll("/([^/]+)$", ".asciidoc:$1"); + logger.error("This failing test was generated by documentation starting at {}. It may include many snippets. 
" + + "See docs/README.asciidoc for an explanation of test generation.", name); + } +} + diff --git a/settings.gradle b/settings.gradle index d03cac653ee..1fc74a0ef2e 100644 --- a/settings.gradle +++ b/settings.gradle @@ -3,6 +3,7 @@ rootProject.name = 'elasticsearch' List projects = [ 'rest-api-spec', 'core', + 'docs', 'distribution:integ-test-zip', 'distribution:zip', 'distribution:tar', diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 83860b18bd9..4a7f83953de 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -78,7 +78,7 @@ public class RestTestExecutionContext implements Closeable { try { response = callApiInternal(apiName, requestParams, body, headers); //we always stash the last response body - stash.stashValue("body", response.getBody()); + stash.stashResponse(response); return response; } catch(RestException e) { response = e.restResponse(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java index 855cbb4d974..9eae902c3e3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java @@ -19,16 +19,17 @@ package org.elasticsearch.test.rest; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import org.elasticsearch.test.rest.client.RestResponse; /** * Allows to cache the last obtained test response and or part of it within variables @@ -41,6 +42,7 @@ public class Stash implements ToXContent { public static final Stash EMPTY = new Stash(); private final Map stash = new HashMap<>(); + private RestResponse response; /** * Allows to saved a specific field in the stash as key-value pair @@ -53,6 +55,12 @@ public class Stash implements ToXContent { } } + public void stashResponse(RestResponse response) throws IOException { + // TODO we can almost certainly save time by lazily evaluating the body + stashValue("body", response.getBody()); + this.response = response; + } + /** * Clears the previously stashed values */ @@ -78,7 +86,10 @@ public class Stash implements ToXContent { * The stash contains fields eventually extracted from previous responses that can be reused * as arguments for following requests (e.g. 
scroll_id) */ - public Object unstashValue(String value) { + public Object unstashValue(String value) throws IOException { + if (value.startsWith("$body.")) { + return response.evaluate(value.substring("$body".length()), this); + } Object stashedValue = stash.get(value.substring(1)); if (stashedValue == null) { throw new IllegalArgumentException("stashed value not found for key [" + value + "]"); @@ -89,14 +100,14 @@ public class Stash implements ToXContent { /** * Recursively unstashes map values if needed */ - public Map unstashMap(Map map) { + public Map unstashMap(Map map) throws IOException { Map copy = new HashMap<>(map); unstashObject(copy); return copy; } @SuppressWarnings("unchecked") - private void unstashObject(Object obj) { + private void unstashObject(Object obj) throws IOException { if (obj instanceof List) { List list = (List) obj; for (int i = 0; i < list.size(); i++) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index e798fd8c8ab..5fb6e199b17 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -19,6 +19,7 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; + import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; @@ -61,6 +62,8 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import static java.util.Objects.requireNonNull; + /** * REST client used to test the elasticsearch REST layer * Holds the {@link RestSpec} used to translate api calls into REST calls @@ -186,6 +189,19 @@ public class RestClient implements Closeable { } private HttpRequestBuilder callApiBuilder(String apiName, Map params, String body) { + if ("raw".equals(apiName)) { + // Raw requests are bit simpler.... + HttpRequestBuilder httpRequestBuilder = httpRequestBuilder(); + httpRequestBuilder.method(requireNonNull(params.remove("method"), "Method must be set to use raw request")); + httpRequestBuilder.path("/"+ requireNonNull(params.remove("path"), "Path must be set to use raw request")); + httpRequestBuilder.body(body); + + // And everything else is a url parameter! 
+ for (Map.Entry entry : params.entrySet()) { + httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); + } + return httpRequestBuilder; + } //create doesn't exist in the spec but is supported in the clients (index with op_type=create) boolean indexCreateApi = "create".equals(apiName); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java index c3dbd583430..b338d76d985 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java @@ -49,14 +49,14 @@ public class JsonPath { /** * Returns the object corresponding to the provided path if present, null otherwise */ - public Object evaluate(String path) { + public Object evaluate(String path) throws IOException { return evaluate(path, Stash.EMPTY); } /** * Returns the object corresponding to the provided path if present, null otherwise */ - public Object evaluate(String path, Stash stash) { + public Object evaluate(String path, Stash stash) throws IOException { String[] parts = parsePath(path); Object object = jsonMap; for (String part : parts) { @@ -69,7 +69,7 @@ public class JsonPath { } @SuppressWarnings("unchecked") - private Object evaluate(String key, Object object, Stash stash) { + private Object evaluate(String key, Object object, Stash stash) throws IOException { if (stash.isStashedValue(key)) { key = stash.unstashValue(key).toString(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java index 07636151119..e1d2011e231 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSectionParser.java @@ -33,19 +33,23 @@ public class RestTestSectionParser implements RestTestFragmentParser PARSER = new RestTestFragmentParser() { + @Override + public ResponseBodyAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(parseContext.parseField())) { + return new ResponseBodyAssertion("$body", parser.map()); + } + } + }; + + private ResponseBodyAssertion(String field, Map expectedValue) { + super(field, expectedValue); + } + + @Override + protected void doAssert(Object actualValue, Object expectedValue) { + if (false == expectedValue.equals(actualValue)) { + @SuppressWarnings("unchecked") + Map actual = (Map) actualValue; + @SuppressWarnings("unchecked") + Map expected = (Map) expectedValue; + FailureMessage message = new FailureMessage(); + message.compareMaps(actual, expected); + throw new AssertionError(message.message); + } + } + + private class FailureMessage { + private final StringBuilder message = new StringBuilder("body didn't match the expected value:\n"); + private int indent = 0; + + private void compareMaps(Map actual, Map expected) { + actual = new TreeMap<>(actual); + expected = new TreeMap<>(expected); + for (Map.Entry expectedEntry : expected.entrySet()) { + compare(expectedEntry.getKey(), expectedEntry.getValue(), actual.remove(expectedEntry.getKey())); + } + for (Map.Entry unmatchedEntry : actual.entrySet()) { + field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]"); + } + } + + private void 
compareLists(List<Object> actual, List<Object> expected) {
+            int i = 0;
+            while (i < actual.size() && i < expected.size()) {
+                // argument order matches the (field, expected, actual) signature of compare
+                compare(i, expected.get(i), actual.get(i));
+                i++; // advance to the next pair; without this the loop never terminates
+            }
+            if (actual.size() == expected.size()) {
+                return;
+            }
+            indent();
+            if (actual.size() < expected.size()) {
+                message.append("expected [").append(expected.size() - i).append("] more entries\n");
+                return;
+            }
+            message.append("received [").append(actual.size() - i).append("] more entries than expected\n");
+        }
+
+        private void compare(Object field, Object expected, @Nullable Object actual) {
+            if (expected instanceof Map) {
+                if (actual == null) {
+                    field(field, "expected map but not found");
+                    return;
+                }
+                if (false == actual instanceof Map) {
+                    field(field, "expected map but found [" + actual + "]");
+                    return;
+                }
+                @SuppressWarnings("unchecked")
+                Map<String, Object> expectedMap = (Map<String, Object>) expected;
+                @SuppressWarnings("unchecked")
+                Map<String, Object> actualMap = (Map<String, Object>) actual;
+                if (expectedMap.isEmpty() && actualMap.isEmpty()) {
+                    field(field, "same [empty map]");
+                    return;
+                }
+                field(field, null);
+                indent += 1;
+                compareMaps(actualMap, expectedMap);
+                indent -= 1;
+                return;
+            }
+            if (expected instanceof List) {
+                if (actual == null) {
+                    field(field, "expected list but not found");
+                    return;
+                }
+                if (false == actual instanceof List) {
+                    field(field, "expected list but found [" + actual + "]");
+                    return;
+                }
+                @SuppressWarnings("unchecked")
+                List<Object> expectedList = (List<Object>) expected;
+                @SuppressWarnings("unchecked")
+                List<Object> actualList = (List<Object>) actual;
+                if (expectedList.isEmpty() && actualList.isEmpty()) {
+                    field(field, "same [empty list]");
+                    return;
+                }
+                field(field, null);
+                indent += 1;
+                compareLists(actualList, expectedList);
+                indent -= 1;
+                return;
+            }
+            if (actual == null) {
+                field(field, "expected [" + expected + "] but not found");
+                return;
+            }
+            if (expected.equals(actual)) {
+                field(field, "same [" + expected + "]");
+                return;
+            }
+            field(field, "expected [" + expected + "] but was [" + actual + "]");
+        }
+
+        private void indent() {
+            for (int i = 0; i < indent; i++) {
+                message.append("  ");
+            }
+        }
+
+        private void field(Object name, String info) {
+            indent();
+            message.append(String.format(Locale.ROOT, "%30s: ", name));
+            if (info != null) {
+                message.append(info);
+            }
+            message.append('\n');
+        }
+    }
+}

From 3912761572f42765365851541cb1f9a9c3eabd55 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Thu, 5 May 2016 15:59:21 -0400
Subject: [PATCH 0085/1311] [docs] Add wait_until_yellow to fix build failure

The snippet in the docs creates an index and uses it with the _analyze
api. The trouble is that if the index hasn't been created fully the
_analyze API will fail. This adds a GET
_cluster/health?wait_for_status=yellow which fixes the issue.

While this does make the docs more cluttered, it also makes the
snippets actually runnable.

Closes #18165
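(Editorial aside before this patch's diffstat: the `FailureMessage` comparison added above is easier to follow stripped of its surroundings. The sketch below is a flattened, dependency-free rendering of the same per-field reporting idea. `MapDiffExample` and `diffMaps` are hypothetical names, it handles only one level of nesting, and it treats null values as missing for brevity.)

[source,java]
--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public final class MapDiffExample {
    private MapDiffExample() {}

    /** Appends one line per field: same, changed, missing, or unexpected. */
    public static void diffMaps(Map<String, Object> actual, Map<String, Object> expected, StringBuilder out) {
        // Sort both sides so the report is stable, as the real assertion does with TreeMap
        Map<String, Object> remaining = new TreeMap<>(actual);
        for (Map.Entry<String, Object> e : new TreeMap<>(expected).entrySet()) {
            Object got = remaining.remove(e.getKey());
            if (got == null) {
                out.append(e.getKey()).append(": expected [").append(e.getValue()).append("] but not found\n");
            } else if (e.getValue().equals(got)) {
                out.append(e.getKey()).append(": same [").append(got).append("]\n");
            } else {
                out.append(e.getKey()).append(": expected [").append(e.getValue()).append("] but was [").append(got).append("]\n");
            }
        }
        // Anything left over on the actual side was not expected at all
        for (Map.Entry<String, Object> leftover : remaining.entrySet()) {
            out.append(leftover.getKey()).append(": unexpected but found [").append(leftover.getValue()).append("]\n");
        }
    }

    public static void main(String[] args) {
        Map<String, Object> expected = new LinkedHashMap<>();
        expected.put("first", "johnny");
        expected.put("goals", 9);
        Map<String, Object> actual = new LinkedHashMap<>();
        actual.put("first", "johnny");
        actual.put("goals", 27);
        actual.put("assists", 17);
        StringBuilder out = new StringBuilder("body didn't match the expected value:\n");
        diffMaps(actual, expected, out);
        System.out.print(out);
    }
}
--------------------------------------------------

The real assertion recurses into nested maps and lists; the point here is just the report format, one line per field stating same, changed, missing, or unexpected.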
---
 docs/reference/analysis/analyzers/pattern-analyzer.asciidoc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc
index b012d3a9673..2aaa0d6bb16 100644
--- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc
+++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc
@@ -103,6 +103,8 @@ PUT test?pretty=1
   }
 }
 
+GET _cluster/health?wait_for_status=yellow
+
 GET test/_analyze?analyzer=camel&text=MooseX::FTPClass2_beta
 # "moose","x","ftp","class","2","beta"
 --------------------------------------------------

From c4090a184170875dfce4780a0a31060e969c162a Mon Sep 17 00:00:00 2001
From: Ali Beyad
Date: Thu, 5 May 2016 16:43:20 -0400
Subject: [PATCH 0086/1311] Remove the Snapshot class in favor of using SnapshotInfo

o/e/snapshots/Snapshot and o/e/snapshots/SnapshotInfo contain the same
fields and represent the same information. Snapshot was used to persist
snapshot information to the snapshot repository, while SnapshotInfo was
used to represent the snapshot information as presented through the
REST layer. This removes the Snapshot class and combines all uses into
the SnapshotInfo class.

Closes #18167
---
 .../resources/checkstyle_suppressions.xml     |   1 -
 .../create/CreateSnapshotResponse.java        |   6 +-
 .../snapshots/get/GetSnapshotsResponse.java   |   4 +-
 .../get/TransportGetSnapshotsAction.java      |  17 +-
 .../TransportSnapshotsStatusAction.java       |   4 +-
 .../repositories/Repository.java              |   8 +-
 .../blobstore/BlobStoreRepository.java        |  22 +-
 .../snapshots/RestoreService.java             |   6 +-
 .../org/elasticsearch/snapshots/Snapshot.java | 358 -----------------
 .../elasticsearch/snapshots/SnapshotInfo.java | 376 ++++++++++++------
 .../snapshots/SnapshotsService.java           |  22 +-
 11 files changed, 305 insertions(+), 519 deletions(-)
 delete mode 100644 core/src/main/java/org/elasticsearch/snapshots/Snapshot.java

diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 152405fcffc..0473f58cf73 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -778,7 +778,6 @@
-
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
index 0be07c703f1..0a7a8a9ce80 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
@@ -57,13 +57,13 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        snapshotInfo = SnapshotInfo.readOptionalSnapshotInfo(in);
+        snapshotInfo = in.readOptionalWriteable(SnapshotInfo::new);
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeOptionalStreamable(snapshotInfo);
+        out.writeOptionalWriteable(snapshotInfo);
     }
 
     /**
@@ -90,7 +90,7 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (snapshotInfo != null) {
             builder.field(Fields.SNAPSHOT);
-            snapshotInfo.toXContent(builder, params);
+
snapshotInfo.toExternalXContent(builder, params); } else { builder.field(Fields.ACCEPTED, true); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index 65b0e4faa4a..a5db19684b2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -60,7 +60,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent { int size = in.readVInt(); List builder = new ArrayList<>(); for (int i = 0; i < size; i++) { - builder.add(SnapshotInfo.readSnapshotInfo(in)); + builder.add(new SnapshotInfo(in)); } snapshots = Collections.unmodifiableList(builder); } @@ -82,7 +82,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent { public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startArray(Fields.SNAPSHOTS); for (SnapshotInfo snapshotInfo : snapshots) { - snapshotInfo.toXContent(builder, params); + snapshotInfo.toExternalXContent(builder, params); } builder.endArray(); return builder; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 0198102a200..833b1a62289 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; @@ -77,18 +76,12 @@ public class TransportGetSnapshotsAction extends TransportMasterNodeAction snapshotInfoBuilder = new ArrayList<>(); if (isAllSnapshots(request.snapshots())) { - List snapshots = snapshotsService.snapshots(request.repository(), request.ignoreUnavailable()); - for (Snapshot snapshot : snapshots) { - snapshotInfoBuilder.add(new SnapshotInfo(snapshot)); - } + snapshotInfoBuilder.addAll(snapshotsService.snapshots(request.repository(), request.ignoreUnavailable())); } else if (isCurrentSnapshots(request.snapshots())) { - List snapshots = snapshotsService.currentSnapshots(request.repository()); - for (Snapshot snapshot : snapshots) { - snapshotInfoBuilder.add(new SnapshotInfo(snapshot)); - } + snapshotInfoBuilder.addAll(snapshotsService.currentSnapshots(request.repository())); } else { Set snapshotsToGet = new LinkedHashSet<>(); // to keep insertion order - List snapshots = null; + List snapshots = null; for (String snapshotOrPattern : request.snapshots()) { if (Regex.isSimpleMatchPattern(snapshotOrPattern) == false) { snapshotsToGet.add(snapshotOrPattern); @@ -96,7 +89,7 @@ public class TransportGetSnapshotsAction extends TransportMasterNodeAction shardStatusBuilder = new ArrayList<>(); if (snapshot.state().completed()) { Map shardStatues = snapshotsService.snapshotShards(snapshotId); diff --git 
a/core/src/main/java/org/elasticsearch/repositories/Repository.java b/core/src/main/java/org/elasticsearch/repositories/Repository.java index 294b36df491..f3c44f29226 100644 --- a/core/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/core/src/main/java/org/elasticsearch/repositories/Repository.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; -import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.IOException; @@ -54,7 +54,7 @@ public interface Repository extends LifecycleComponent { * @param snapshotId snapshot ID * @return information about snapshot */ - Snapshot readSnapshot(SnapshotId snapshotId); + SnapshotInfo readSnapshot(SnapshotId snapshotId); /** * Returns global metadata associate with the snapshot. @@ -65,7 +65,7 @@ public interface Repository extends LifecycleComponent { * @param indices list of indices * @return information about snapshot */ - MetaData readSnapshotMetaData(SnapshotId snapshotId, Snapshot snapshot, List indices) throws IOException; + MetaData readSnapshotMetaData(SnapshotId snapshotId, SnapshotInfo snapshot, List indices) throws IOException; /** * Returns the list of snapshots currently stored in the repository @@ -94,7 +94,7 @@ public interface Repository extends LifecycleComponent { * @param shardFailures list of shard failures * @return snapshot description */ - Snapshot finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures); + SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures); /** * Deletes snapshot diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index ef0cab5c156..121df3e5832 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -57,9 +57,9 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.snapshots.InvalidSnapshotNameException; -import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotCreationException; import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotShardFailure; @@ -165,9 +165,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent indexMetaDataLegacyFormat; - private ChecksumBlobStoreFormat snapshotFormat; + private ChecksumBlobStoreFormat snapshotFormat; - private LegacyBlobStoreFormat snapshotLegacyFormat; + private LegacyBlobStoreFormat snapshotLegacyFormat; private final boolean readOnly; @@ -202,8 +202,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT, IndexMetaData.PROTO, parseFieldMatcher, isCompress()); indexMetaDataLegacyFormat = new 
LegacyBlobStoreFormat<>(LEGACY_SNAPSHOT_NAME_FORMAT, IndexMetaData.PROTO, parseFieldMatcher); - snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, Snapshot.PROTO, parseFieldMatcher, isCompress()); - snapshotLegacyFormat = new LegacyBlobStoreFormat<>(LEGACY_SNAPSHOT_NAME_FORMAT, Snapshot.PROTO, parseFieldMatcher); + snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, SnapshotInfo.PROTO, parseFieldMatcher, isCompress()); + snapshotLegacyFormat = new LegacyBlobStoreFormat<>(LEGACY_SNAPSHOT_NAME_FORMAT, SnapshotInfo.PROTO, parseFieldMatcher); } /** @@ -294,7 +294,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent indices = Collections.emptyList(); - Snapshot snapshot = null; + SnapshotInfo snapshot = null; try { snapshot = readSnapshot(snapshotId); indices = snapshot.indices(); @@ -368,9 +368,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent indices, long startTime, String failure, int totalShards, List shardFailures) { + public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures) { try { - Snapshot blobStoreSnapshot = new Snapshot(snapshotId.getSnapshot(), indices, startTime, failure, System.currentTimeMillis(), totalShards, shardFailures); + SnapshotInfo blobStoreSnapshot = new SnapshotInfo(snapshotId.getSnapshot(), indices, startTime, failure, System.currentTimeMillis(), totalShards, shardFailures); snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getSnapshot()); List snapshotIds = snapshots(); if (!snapshotIds.contains(snapshotId)) { @@ -425,7 +425,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent indices) throws IOException { + public MetaData readSnapshotMetaData(SnapshotId snapshotId, SnapshotInfo snapshot, List indices) throws IOException { return readSnapshotMetaData(snapshotId, snapshot.version(), indices, false); } @@ -433,7 +433,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent snapshotFormat(Version version) { + private BlobStoreFormat snapshotFormat(Version version) { if(legacyMetaData(version)) { return snapshotLegacyFormat; } else { diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 8419b7c2667..5338f927005 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -190,7 +190,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis // Read snapshot info and metadata from the repository Repository repository = repositoriesService.repository(request.repository()); final SnapshotId snapshotId = new SnapshotId(request.repository(), request.name()); - final Snapshot snapshot = repository.readSnapshot(snapshotId); + final SnapshotInfo snapshot = repository.readSnapshot(snapshotId); List filteredIndices = SnapshotUtils.filterIndices(snapshot.indices(), request.indices(), request.indicesOptions()); MetaData metaDataIn = repository.readSnapshotMetaData(snapshotId, snapshot, filteredIndices); @@ -708,7 +708,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis * @param snapshotId snapshot id * @param snapshot snapshot metadata */ - private void validateSnapshotRestorable(SnapshotId snapshotId, Snapshot snapshot) { + private void 
validateSnapshotRestorable(SnapshotId snapshotId, SnapshotInfo snapshot) { if (!snapshot.state().restorable()) { throw new SnapshotRestoreException(snapshotId, "unsupported snapshot state [" + snapshot.state() + "]"); } @@ -765,7 +765,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis UPDATE_RESTORE_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME); } - private boolean failed(Snapshot snapshot, String index) { + private boolean failed(SnapshotInfo snapshot, String index) { for (SnapshotShardFailure failure : snapshot.shardFailures()) { if (index.equals(failure.index())) { return true; diff --git a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java deleted file mode 100644 index 13ec659b629..00000000000 --- a/core/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ /dev/null @@ -1,358 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.snapshots; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.FromXContentBuilder; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -/** - * Represent information about snapshot - */ -public class Snapshot implements Comparable, ToXContent, FromXContentBuilder { - - private final String name; - - private final Version version; - - private final SnapshotState state; - - private final String reason; - - private final List indices; - - private final long startTime; - - private final long endTime; - - private final int totalShard; - - private final int successfulShards; - - private final List shardFailures; - - private final static List NO_FAILURES = Collections.emptyList(); - - public final static Snapshot PROTO = new Snapshot(); - - private Snapshot(String name, List indices, SnapshotState state, String reason, Version version, long startTime, long endTime, - int totalShard, int successfulShards, List shardFailures) { - assert name != null; - assert indices != null; - assert state != null; - assert shardFailures != null; - this.name = name; - this.indices = indices; - this.state = state; - this.reason = reason; - this.version = version; - this.startTime = startTime; - this.endTime = endTime; - this.totalShard = totalShard; - this.successfulShards = successfulShards; - this.shardFailures = shardFailures; - } - - - public Snapshot(String name, List indices, long startTime) { - this(name, 
indices, SnapshotState.IN_PROGRESS, null, Version.CURRENT, startTime, 0L, 0, 0, NO_FAILURES); - } - - public Snapshot(String name, List indices, long startTime, String reason, long endTime, - int totalShard, List shardFailures) { - this(name, indices, snapshotState(reason, shardFailures), reason, Version.CURRENT, - startTime, endTime, totalShard, totalShard - shardFailures.size(), shardFailures); - } - - /** - * Special constructor for the prototype object - */ - private Snapshot() { - this("", Collections.emptyList(), 0); - } - - private static SnapshotState snapshotState(String reason, List shardFailures) { - if (reason == null) { - if (shardFailures.isEmpty()) { - return SnapshotState.SUCCESS; - } else { - return SnapshotState.PARTIAL; - } - } else { - return SnapshotState.FAILED; - } - } - - /** - * Returns snapshot name - * - * @return snapshot name - */ - public String name() { - return name; - } - - /** - * Returns current snapshot state - * - * @return snapshot state - */ - public SnapshotState state() { - return state; - } - - /** - * Returns reason for complete snapshot failure - * - * @return snapshot failure reason - */ - public String reason() { - return reason; - } - - /** - * Returns version of Elasticsearch that was used to create this snapshot - * - * @return Elasticsearch version - */ - public Version version() { - return version; - } - - /** - * Returns indices that were included into this snapshot - * - * @return list of indices - */ - public List indices() { - return indices; - } - - /** - * Returns time when snapshot started - * - * @return snapshot start time - */ - public long startTime() { - return startTime; - } - - /** - * Returns time when snapshot ended - *

- * Can be 0L if snapshot is still running - * - * @return snapshot end time - */ - public long endTime() { - return endTime; - } - - /** - * Returns total number of shards that were snapshotted - * - * @return number of shards - */ - public int totalShard() { - return totalShard; - } - - /** - * Returns total number of shards that were successfully snapshotted - * - * @return number of successful shards - */ - public int successfulShards() { - return successfulShards; - } - - /** - * Returns shard failures - */ - public List shardFailures() { - return shardFailures; - } - - /** - * Compares two snapshots by their start time - * - * @param o other snapshot - * @return the value {@code 0} if snapshots were created at the same time; - * a value less than {@code 0} if this snapshot was created before snapshot {@code o}; and - * a value greater than {@code 0} if this snapshot was created after snapshot {@code o}; - */ - @Override - public int compareTo(Snapshot o) { - return Long.compare(startTime, o.startTime); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Snapshot that = (Snapshot) o; - - if (startTime != that.startTime) return false; - if (!name.equals(that.name)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = name.hashCode(); - result = 31 * result + Long.hashCode(startTime); - return result; - } - - @Override - public Snapshot fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { - return fromXContent(parser); - } - - static final class Fields { - static final String SNAPSHOT = "snapshot"; - static final String NAME = "name"; - static final String VERSION_ID = "version_id"; - static final String INDICES = "indices"; - static final String STATE = "state"; - static final String REASON = "reason"; - static final String START_TIME = "start_time"; - static final String END_TIME = "end_time"; - static final String TOTAL_SHARDS = "total_shards"; - static final String SUCCESSFUL_SHARDS = "successful_shards"; - static final String FAILURES = "failures"; - } - - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(Fields.SNAPSHOT); - builder.field(Fields.NAME, name); - builder.field(Fields.VERSION_ID, version.id); - builder.startArray(Fields.INDICES); - for (String index : indices) { - builder.value(index); - } - builder.endArray(); - builder.field(Fields.STATE, state); - if (reason != null) { - builder.field(Fields.REASON, reason); - } - builder.field(Fields.START_TIME, startTime); - builder.field(Fields.END_TIME, endTime); - builder.field(Fields.TOTAL_SHARDS, totalShard); - builder.field(Fields.SUCCESSFUL_SHARDS, successfulShards); - builder.startArray(Fields.FAILURES); - for (SnapshotShardFailure shardFailure : shardFailures) { - builder.startObject(); - shardFailure.toXContent(builder, params); - builder.endObject(); - } - builder.endArray(); - builder.endObject(); - return builder; - } - - - public static Snapshot fromXContent(XContentParser parser) throws IOException { - String name = null; - Version version = Version.CURRENT; - SnapshotState state = SnapshotState.IN_PROGRESS; - String reason = null; - List indices = Collections.emptyList(); - long startTime = 0; - long endTime = 0; - int totalShard = 0; - int successfulShards = 0; - List shardFailures = NO_FAILURES; - if (parser.currentToken() == null) { // fresh parser? 
move to the first token - parser.nextToken(); - } - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { // on a start object move to next token - parser.nextToken(); - } - XContentParser.Token token; - if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) { - String currentFieldName = parser.currentName(); - if ("snapshot".equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - token = parser.nextToken(); - if (token.isValue()) { - if ("name".equals(currentFieldName)) { - name = parser.text(); - } else if ("state".equals(currentFieldName)) { - state = SnapshotState.valueOf(parser.text()); - } else if ("reason".equals(currentFieldName)) { - reason = parser.text(); - } else if ("start_time".equals(currentFieldName)) { - startTime = parser.longValue(); - } else if ("end_time".equals(currentFieldName)) { - endTime = parser.longValue(); - } else if ("total_shards".equals(currentFieldName)) { - totalShard = parser.intValue(); - } else if ("successful_shards".equals(currentFieldName)) { - successfulShards = parser.intValue(); - } else if ("version_id".equals(currentFieldName)) { - version = Version.fromId(parser.intValue()); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if ("indices".equals(currentFieldName)) { - ArrayList indicesArray = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - indicesArray.add(parser.text()); - } - indices = Collections.unmodifiableList(indicesArray); - } else if ("failures".equals(currentFieldName)) { - ArrayList shardFailureArrayList = new ArrayList<>(); - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - shardFailureArrayList.add(SnapshotShardFailure.fromXContent(parser)); - } - shardFailures = Collections.unmodifiableList(shardFailureArrayList); - } else { - // It was probably created by newer version - ignoring - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - // It was probably created by newer version - ignoring - parser.skipChildren(); - } - } - } - } - } else { - throw new ElasticsearchParseException("unexpected token [" + token + "]"); - } - return new Snapshot(name, indices, state, reason, version, startTime, endTime, totalShard, successfulShards, shardFailures); - } - -} diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 354094404ae..871e765cfd0 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -18,15 +18,19 @@ */ package org.elasticsearch.snapshots; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -35,52 +39,109 @@ import java.util.Collections; import java.util.List; /** - * Information about snapshot + * Information about a snapshot */ -public class SnapshotInfo implements ToXContent, Streamable { +public final class SnapshotInfo implements Comparable, ToXContent, FromXContentBuilder, Writeable { + public static final SnapshotInfo PROTO = new SnapshotInfo("", Collections.emptyList(), 0); private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime"); + private static final String SNAPSHOT = "snapshot"; + private static final String INDICES = "indices"; + private static final String STATE = "state"; + private static final String REASON = "reason"; + private static final String START_TIME = "start_time"; + private static final String START_TIME_IN_MILLIS = "start_time_in_millis"; + private static final String END_TIME = "end_time"; + private static final String END_TIME_IN_MILLIS = "end_time_in_millis"; + private static final String DURATION = "duration"; + private static final String DURATION_IN_MILLIS = "duration_in_millis"; + private static final String FAILURES = "failures"; + private static final String SHARDS = "shards"; + private static final String TOTAL = "total"; + private static final String FAILED = "failed"; + private static final String SUCCESSFUL = "successful"; + private static final String VERSION_ID = "version_id"; + private static final String VERSION = "version"; + private static final String NAME = "name"; + private static final String TOTAL_SHARDS = "total_shards"; + private static final String SUCCESSFUL_SHARDS = "successful_shards"; - private String name; + private final String name; - private SnapshotState state; + private final SnapshotState state; - private String reason; + private final String reason; - private List indices; + private final List indices; - private long startTime; + private final long startTime; - private long endTime; + private final long endTime; - private int totalShards; + private final int totalShards; - private int successfulShards; + private final int successfulShards; - private Version version; + private final Version version; - private List shardFailures; + private final List shardFailures; - SnapshotInfo() { + public SnapshotInfo(String name, List indices, long startTime) { + this(name, indices, SnapshotState.IN_PROGRESS, null, Version.CURRENT, startTime, 0L, 0, 0, Collections.emptyList()); + } + public SnapshotInfo(String name, List indices, long startTime, String reason, long endTime, + int totalShards, List shardFailures) { + this(name, indices, snapshotState(reason, shardFailures), reason, Version.CURRENT, + startTime, endTime, totalShards, totalShards - shardFailures.size(), shardFailures); + } + + private SnapshotInfo(String name, List indices, SnapshotState state, String reason, Version version, long startTime, + long endTime, int totalShards, int successfulShards, List shardFailures) { + assert name != null; + assert indices != null; + assert state != null; + assert shardFailures != null; + this.name = name; + this.indices = indices; + this.state = state; + this.reason = reason; + this.version = version; + this.startTime = startTime; + this.endTime = endTime; + this.totalShards = totalShards; + this.successfulShards = successfulShards; + this.shardFailures = shardFailures; } /** - * Creates a new snapshot information from a {@link Snapshot} - * - * @param 
snapshot snapshot information returned by repository + * Constructs snapshot information from stream input */ - public SnapshotInfo(Snapshot snapshot) { - name = snapshot.name(); - state = snapshot.state(); - reason = snapshot.reason(); - indices = snapshot.indices(); - startTime = snapshot.startTime(); - endTime = snapshot.endTime(); - totalShards = snapshot.totalShard(); - successfulShards = snapshot.successfulShards(); - shardFailures = snapshot.shardFailures(); - version = snapshot.version(); + public SnapshotInfo(final StreamInput in) throws IOException { + name = in.readString(); + int size = in.readVInt(); + List indicesListBuilder = new ArrayList<>(); + for (int i = 0; i < size; i++) { + indicesListBuilder.add(in.readString()); + } + indices = Collections.unmodifiableList(indicesListBuilder); + state = SnapshotState.fromValue(in.readByte()); + reason = in.readOptionalString(); + startTime = in.readVLong(); + endTime = in.readVLong(); + totalShards = in.readVInt(); + successfulShards = in.readVInt(); + size = in.readVInt(); + if (size > 0) { + List failureBuilder = new ArrayList<>(); + for (int i = 0; i < size; i++) { + failureBuilder.add(SnapshotShardFailure.readSnapshotShardFailure(in)); + } + shardFailures = Collections.unmodifiableList(failureBuilder); + } else { + shardFailures = Collections.emptyList(); + } + version = Version.readVersion(in); } /** @@ -184,6 +245,39 @@ public class SnapshotInfo implements ToXContent, Streamable { return version; } + /** + * Compares two snapshots by their start time + * + * @param o other snapshot + * @return the value {@code 0} if snapshots were created at the same time; + * a value less than {@code 0} if this snapshot was created before snapshot {@code o}; and + * a value greater than {@code 0} if this snapshot was created after snapshot {@code o}; + */ + @Override + public int compareTo(final SnapshotInfo o) { + return Long.compare(startTime, o.startTime); + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + final SnapshotInfo that = (SnapshotInfo) o; + return startTime == that.startTime && name.equals(that.name); + } + + @Override + public int hashCode() { + int result = name.hashCode(); + result = 31 * result + Long.hashCode(startTime); + return result; + } + /** * Returns snapshot REST status */ @@ -194,98 +288,166 @@ public class SnapshotInfo implements ToXContent, Streamable { if (shardFailures.size() == 0) { return RestStatus.OK; } - return RestStatus.status(successfulShards, totalShards, shardFailures.toArray(new ShardOperationFailedException[shardFailures.size()])); - } - - static final class Fields { - static final String INDICES = "indices"; - static final String STATE = "state"; - static final String REASON = "reason"; - static final String START_TIME = "start_time"; - static final String START_TIME_IN_MILLIS = "start_time_in_millis"; - static final String END_TIME = "end_time"; - static final String END_TIME_IN_MILLIS = "end_time_in_millis"; - static final String DURATION = "duration"; - static final String DURATION_IN_MILLIS = "duration_in_millis"; - static final String FAILURES = "failures"; - static final String SHARDS = "shards"; - static final String TOTAL = "total"; - static final String FAILED = "failed"; - static final String SUCCESSFUL = "successful"; - static final String VERSION_ID = "version_id"; - static final String VERSION = "version"; + return RestStatus.status(successfulShards, totalShards, + 
shardFailures.toArray(new ShardOperationFailedException[shardFailures.size()])); } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("snapshot", name); - builder.field(Fields.VERSION_ID, version.id); - builder.field(Fields.VERSION, version.toString()); - builder.startArray(Fields.INDICES); + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(SNAPSHOT); + builder.field(NAME, name); + builder.field(VERSION_ID, version.id); + builder.startArray(INDICES); for (String index : indices) { builder.value(index); } builder.endArray(); - builder.field(Fields.STATE, state); + builder.field(STATE, state); if (reason != null) { - builder.field(Fields.REASON, reason); + builder.field(REASON, reason); } - if (startTime != 0) { - builder.field(Fields.START_TIME, DATE_TIME_FORMATTER.printer().print(startTime)); - builder.field(Fields.START_TIME_IN_MILLIS, startTime); - } - if (endTime != 0) { - builder.field(Fields.END_TIME, DATE_TIME_FORMATTER.printer().print(endTime)); - builder.field(Fields.END_TIME_IN_MILLIS, endTime); - builder.timeValueField(Fields.DURATION_IN_MILLIS, Fields.DURATION, endTime - startTime); - } - builder.startArray(Fields.FAILURES); + builder.field(START_TIME, startTime); + builder.field(END_TIME, endTime); + builder.field(TOTAL_SHARDS, totalShards); + builder.field(SUCCESSFUL_SHARDS, successfulShards); + builder.startArray(FAILURES); for (SnapshotShardFailure shardFailure : shardFailures) { builder.startObject(); shardFailure.toXContent(builder, params); builder.endObject(); } builder.endArray(); - builder.startObject(Fields.SHARDS); - builder.field(Fields.TOTAL, totalShards); - builder.field(Fields.FAILED, failedShards()); - builder.field(Fields.SUCCESSFUL, successfulShards); + builder.endObject(); + return builder; + } + + /** + * Produces the external X-content that is delivered through the REST layer. 
+ */ + public XContentBuilder toExternalXContent(final XContentBuilder builder, final ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(SNAPSHOT, name); + builder.field(VERSION_ID, version.id); + builder.field(VERSION, version.toString()); + builder.startArray(INDICES); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + builder.field(STATE, state); + if (reason != null) { + builder.field(REASON, reason); + } + if (startTime != 0) { + builder.field(START_TIME, DATE_TIME_FORMATTER.printer().print(startTime)); + builder.field(START_TIME_IN_MILLIS, startTime); + } + if (endTime != 0) { + builder.field(END_TIME, DATE_TIME_FORMATTER.printer().print(endTime)); + builder.field(END_TIME_IN_MILLIS, endTime); + builder.timeValueField(DURATION_IN_MILLIS, DURATION, endTime - startTime); + } + builder.startArray(FAILURES); + for (SnapshotShardFailure shardFailure : shardFailures) { + builder.startObject(); + shardFailure.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + builder.startObject(SHARDS); + builder.field(TOTAL, totalShards); + builder.field(FAILED, failedShards()); + builder.field(SUCCESSFUL, successfulShards); builder.endObject(); builder.endObject(); return builder; } @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString(); - int size = in.readVInt(); - List indicesListBuilder = new ArrayList<>(); - for (int i = 0; i < size; i++) { - indicesListBuilder.add(in.readString()); + public SnapshotInfo fromXContent(final XContentParser parser, final ParseFieldMatcher matcher) throws IOException { + return fromXContent(parser); + } + + /** + * This method creates a SnapshotInfo from internal x-content. It does not + * handle x-content written with the external version as external x-content + * is only for display purposes and does not need to be parsed. + */ + public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException { + String name = null; + Version version = Version.CURRENT; + SnapshotState state = SnapshotState.IN_PROGRESS; + String reason = null; + List indices = Collections.emptyList(); + long startTime = 0; + long endTime = 0; + int totalShard = 0; + int successfulShards = 0; + List shardFailures = Collections.emptyList(); + if (parser.currentToken() == null) { // fresh parser? 
move to the first token + parser.nextToken(); } - indices = Collections.unmodifiableList(indicesListBuilder); - state = SnapshotState.fromValue(in.readByte()); - reason = in.readOptionalString(); - startTime = in.readVLong(); - endTime = in.readVLong(); - totalShards = in.readVInt(); - successfulShards = in.readVInt(); - size = in.readVInt(); - if (size > 0) { - List failureBuilder = new ArrayList<>(); - for (int i = 0; i < size; i++) { - failureBuilder.add(SnapshotShardFailure.readSnapshotShardFailure(in)); + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { // on a start object move to next token + parser.nextToken(); + } + XContentParser.Token token; + if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) { + String currentFieldName = parser.currentName(); + if (SNAPSHOT.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + token = parser.nextToken(); + if (token.isValue()) { + if (NAME.equals(currentFieldName)) { + name = parser.text(); + } else if (STATE.equals(currentFieldName)) { + state = SnapshotState.valueOf(parser.text()); + } else if (REASON.equals(currentFieldName)) { + reason = parser.text(); + } else if (START_TIME.equals(currentFieldName)) { + startTime = parser.longValue(); + } else if (END_TIME.equals(currentFieldName)) { + endTime = parser.longValue(); + } else if (TOTAL_SHARDS.equals(currentFieldName)) { + totalShard = parser.intValue(); + } else if (SUCCESSFUL_SHARDS.equals(currentFieldName)) { + successfulShards = parser.intValue(); + } else if (VERSION_ID.equals(currentFieldName)) { + version = Version.fromId(parser.intValue()); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (INDICES.equals(currentFieldName)) { + ArrayList indicesArray = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + indicesArray.add(parser.text()); + } + indices = Collections.unmodifiableList(indicesArray); + } else if (FAILURES.equals(currentFieldName)) { + ArrayList shardFailureArrayList = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + shardFailureArrayList.add(SnapshotShardFailure.fromXContent(parser)); + } + shardFailures = Collections.unmodifiableList(shardFailureArrayList); + } else { + // It was probably created by newer version - ignoring + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + // It was probably created by newer version - ignoring + parser.skipChildren(); + } + } + } } - shardFailures = Collections.unmodifiableList(failureBuilder); } else { - shardFailures = Collections.emptyList(); + throw new ElasticsearchParseException("unexpected token [" + token + "]"); } - version = Version.readVersion(in); + return new SnapshotInfo(name, indices, state, reason, version, startTime, endTime, totalShard, successfulShards, shardFailures); } @Override - public void writeTo(StreamOutput out) throws IOException { + public void writeTo(final StreamOutput out) throws IOException { out.writeString(name); out.writeVInt(indices.size()); for (String index : indices) { @@ -304,26 +466,16 @@ public class SnapshotInfo implements ToXContent, Streamable { Version.writeVersion(version, out); } - /** - * Reads snapshot information from stream input - * - * @param in stream input - * @return deserialized snapshot info - */ - public static SnapshotInfo readSnapshotInfo(StreamInput in) throws 
IOException { - SnapshotInfo snapshotInfo = new SnapshotInfo(); - snapshotInfo.readFrom(in); - return snapshotInfo; - } - - /** - * Reads optional snapshot information from stream input - * - * @param in stream input - * @return deserialized snapshot info or null - */ - public static SnapshotInfo readOptionalSnapshotInfo(StreamInput in) throws IOException { - return in.readOptionalStreamable(SnapshotInfo::new); + private static SnapshotState snapshotState(final String reason, final List shardFailures) { + if (reason == null) { + if (shardFailures.isEmpty()) { + return SnapshotState.SUCCESS; + } else { + return SnapshotState.PARTIAL; + } + } else { + return SnapshotState.FAILED; + } } } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 186a1965a96..8e6681893c9 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -126,7 +126,7 @@ public class SnapshotsService extends AbstractLifecycleComponent entries = currentSnapshots(snapshotId.getRepository(), new String[]{snapshotId.getSnapshot()}); if (!entries.isEmpty()) { @@ -141,8 +141,8 @@ public class SnapshotsService extends AbstractLifecycleComponent snapshots(String repositoryName, boolean ignoreUnavailable) { - Set snapshotSet = new HashSet<>(); + public List snapshots(String repositoryName, boolean ignoreUnavailable) { + Set snapshotSet = new HashSet<>(); List entries = currentSnapshots(repositoryName, null); for (SnapshotsInProgress.Entry entry : entries) { snapshotSet.add(inProgressSnapshot(entry)); @@ -161,7 +161,7 @@ public class SnapshotsService extends AbstractLifecycleComponent snapshotList = new ArrayList<>(snapshotSet); + ArrayList snapshotList = new ArrayList<>(snapshotSet); CollectionUtil.timSort(snapshotList); return Collections.unmodifiableList(snapshotList); } @@ -172,8 +172,8 @@ public class SnapshotsService extends AbstractLifecycleComponent currentSnapshots(String repositoryName) { - List snapshotList = new ArrayList<>(); + public List currentSnapshots(String repositoryName) { + List snapshotList = new ArrayList<>(); List entries = currentSnapshots(repositoryName, null); for (SnapshotsInProgress.Entry entry : entries) { snapshotList.add(inProgressSnapshot(entry)); @@ -408,8 +408,8 @@ public class SnapshotsService extends AbstractLifecycleComponent shardStatus = new HashMap<>(); Repository repository = repositoriesService.repository(snapshotId.getRepository()); IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(snapshotId.getRepository()); - Snapshot snapshot = repository.readSnapshot(snapshotId); + SnapshotInfo snapshot = repository.readSnapshot(snapshotId); MetaData metaData = repository.readSnapshotMetaData(snapshotId, snapshot, snapshot.indices()); for (String index : snapshot.indices()) { IndexMetaData indexMetaData = metaData.indices().get(index); @@ -800,8 +800,8 @@ public class SnapshotsService extends AbstractLifecycleComponent Date: Thu, 5 May 2016 14:21:14 -0700 Subject: [PATCH 0087/1311] Packaging: Make rpm not include parent dirs closes #18162 --- distribution/rpm/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index 0d9f658f488..72ed58e52c3 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -40,6 +40,7 @@ task buildRpm(type: Rpm) { vendor 'Elasticsearch' dirMode 
0755 fileMode 0644 + addParentDirs false // TODO ospackage doesn't support icon but we used to have one } From e3ce6c9048d95e6e19b02ccb209f17d28543170c Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 5 May 2016 18:31:48 -0400 Subject: [PATCH 0088/1311] Painless: add fielddata accessors (.value/.values/.distance()/etc) This gives better coverage and consistency with the scripting APIs, by whitelisting the primary search scripting API classes and using them instead of only Map and List methods. For example, accessing fields can now be done with `.value` instead of `.0` because `getValue()` is whitelisted. For now, access to a document's fields in this way (loads) are fast-pathed in the code, to avoid dynamic overhead. Access to geo fields and geo distance functions is now supported. TODO: date support (e.g. whitelist ReadableDateTime methods as a start) TODO: improve docs (like expressions and groovy have for document's fields) TODO: remove fast-path hack Closes #18169 Squashed commit of the following: commit ec9f24b2424891a7429bb4c0a03f9868cba0a213 Author: Robert Muir Date: Thu May 5 17:59:37 2016 -0400 cutover to instead of here commit 9edb1550438acd209733bc36f0d2e0aecf190ecb Author: Robert Muir Date: Thu May 5 17:03:02 2016 -0400 add fast-path for docvalues field loads commit f8e38c0932fccc0cfa217516130ad61522e59fe5 Author: Robert Muir Date: Thu May 5 16:47:31 2016 -0400 Painless: add fielddata accessors (.value/.values/.distance()/etc) --- .../elasticsearch/common/geo/GeoPoint.java | 2 +- .../modules/scripting/painless.asciidoc | 9 +- .../java/org/elasticsearch/painless/Def.java | 22 ++++- .../elasticsearch/painless/Definition.java | 96 +++++++++++++++++++ .../test/plan_a/20_scriptfield.yaml | 2 +- .../rest-api-spec/test/plan_a/30_search.yaml | 12 +-- 6 files changed, 126 insertions(+), 17 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 7713157422e..5d1250a5148 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -38,7 +38,7 @@ public final class GeoPoint { } /** - * Create a new Geopointform a string. This String must either be a geohash + * Create a new Geopoint from a string. This String must either be a geohash * or a lat-lon tuple. * * @param value String to create the point from diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index 98cb052d7a3..b8c4c2cc81f 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -115,9 +115,8 @@ GET hockey/_search ---------------------------------------------------------------- // AUTOSENSE -You must always specify the index of the field value you want, even if there's only a single item in the field. -All fields in Elasticsearch are multi-valued and Painless does not provide a `.value` shortcut. The following example uses a Painless script to sort the players by their combined first and last names. The names are accessed using -`input.doc['first'].0` and `input.doc['last'].0`. +The following example uses a Painless script to sort the players by their combined first and last names. The names are accessed using +`input.doc['first'].value` and `input.doc['last'].value`. 
[source,js] ---------------------------------------------------------------- @@ -132,7 +131,7 @@ GET hockey/_search "order": "asc", "script": { "lang": "painless", - "inline": "input.doc['first'].0 + ' ' + input.doc['last'].0" + "inline": "input.doc['first'].value + ' ' + input.doc['last'].value" } } } @@ -218,7 +217,7 @@ GET hockey/_search "full_name_dynamic": { "script": { "lang": "painless", - "inline": "def first = input.doc['first'].0; def last = input.doc['last'].0; return first + ' ' + last;" + "inline": "def first = input.doc['first'].value; def last = input.doc['last'].value; return first + ' ' + last;" } }, "full_name_static": { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 4e170c93e49..136746026fa 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Field; import org.elasticsearch.painless.Definition.Method; @@ -119,9 +120,22 @@ public class Def { @SuppressWarnings("rawtypes") public static Object fieldLoad(final Object owner, final String name, final Definition definition) { - if (owner.getClass().isArray() && "length".equals(name)) { + final Class clazz = owner.getClass(); + if (clazz.isArray() && "length".equals(name)) { return Array.getLength(owner); } else { + // TODO: remove this fast-path, once we speed up dynamics some more + if ("value".equals(name) && owner instanceof ScriptDocValues) { + if (clazz == ScriptDocValues.Doubles.class) { + return ((ScriptDocValues.Doubles)owner).getValue(); + } else if (clazz == ScriptDocValues.Longs.class) { + return ((ScriptDocValues.Longs)owner).getValue(); + } else if (clazz == ScriptDocValues.Strings.class) { + return ((ScriptDocValues.Strings)owner).getValue(); + } else if (clazz == ScriptDocValues.GeoPoints.class) { + return ((ScriptDocValues.GeoPoints)owner).getValue(); + } + } final Field field = getField(owner, name, definition); MethodHandle handle; @@ -143,7 +157,7 @@ public class Def { } } else { throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " + - "for class [" + owner.getClass().getCanonicalName() + "]."); + "for class [" + clazz.getCanonicalName() + "]."); } } else { handle = field.getter; @@ -151,13 +165,13 @@ public class Def { if (handle == null) { throw new IllegalArgumentException( - "Unable to read from field [" + name + "] with owner class [" + owner.getClass() + "]."); + "Unable to read from field [" + name + "] with owner class [" + clazz + "]."); } else { try { return handle.invoke(owner); } catch (final Throwable throwable) { throw new IllegalArgumentException("Error loading value from " + - "field [" + name + "] with owner class [" + owner.getClass() + "].", throwable); + "field [" + name + "] with owner class [" + clazz + "].", throwable); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 450f2aa6fa4..5a0e0c1e636 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -33,6 +33,9 @@ import 
java.util.List; import java.util.Map; import java.util.Set; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.index.fielddata.ScriptDocValues; + class Definition { enum Sort { VOID( void.class , 0 , true , false , false , false ), @@ -393,6 +396,14 @@ class Definition { final Type iargexcepType; final Type istateexceptType; final Type nfexcepType; + + // docvalues accessors + final Type geoPointType; + final Type stringsType; + // TODO: add ReadableDateTime? or don't expose the joda stuff? + final Type longsType; + final Type doublesType; + final Type geoPointsType; public Definition() { structs = new HashMap<>(); @@ -471,6 +482,12 @@ class Definition { istateexceptType = getType("IllegalStateException"); nfexcepType = getType("NumberFormatException"); + geoPointType = getType("GeoPoint"); + stringsType = getType("Strings"); + longsType = getType("Longs"); + doublesType = getType("Doubles"); + geoPointsType = getType("GeoPoints"); + addDefaultElements(); copyDefaultStructs(); addDefaultTransforms(); @@ -564,6 +581,12 @@ class Definition { iargexcepType = definition.iargexcepType; istateexceptType = definition.istateexceptType; nfexcepType = definition.nfexcepType; + + geoPointType = definition.geoPointType; + stringsType = definition.stringsType; + longsType = definition.longsType; + doublesType = definition.doublesType; + geoPointsType = definition.geoPointsType; } private void addDefaultStructs() { @@ -634,6 +657,12 @@ class Definition { addStruct( "IllegalArgumentException" , IllegalArgumentException.class); addStruct( "IllegalStateException" , IllegalStateException.class); addStruct( "NumberFormatException" , NumberFormatException.class); + + addStruct( "GeoPoint" , GeoPoint.class); + addStruct( "Strings" , ScriptDocValues.Strings.class); + addStruct( "Longs" , ScriptDocValues.Longs.class); + addStruct( "Doubles" , ScriptDocValues.Doubles.class); + addStruct( "GeoPoints" , ScriptDocValues.GeoPoints.class); } private void addDefaultClasses() { @@ -670,6 +699,12 @@ class Definition { addClass("HashMap"); addClass("Exception"); + + addClass("GeoPoint"); + addClass("Strings"); + addClass("Longs"); + addClass("Doubles"); + addClass("GeoPoints"); } private void addDefaultElements() { @@ -1032,6 +1067,61 @@ class Definition { addConstructor("IllegalStateException", "new", new Type[] {stringType}, null); addConstructor("NumberFormatException", "new", new Type[] {stringType}, null); + + addMethod("GeoPoint", "getLat", null, false, doubleType, new Type[] {}, null, null); + addMethod("GeoPoint", "getLon", null, false, doubleType, new Type[] {}, null, null); + addMethod("Strings", "getValue", null, false, stringType, new Type[] {}, null, null); + addMethod("Strings", "getValues", null, false, slistType, new Type[] {}, null, null); + addMethod("Longs", "getValue", null, false, longType, new Type[] {}, null, null); + addMethod("Longs", "getValues", null, false, olistType, new Type[] {}, null, null); + // TODO: add better date support for Longs here? (carefully?) 
+ addMethod("Doubles", "getValue", null, false, doubleType, new Type[] {}, null, null); + addMethod("Doubles", "getValues", null, false, olistType, new Type[] {}, null, null); + addMethod("GeoPoints", "getValue", null, false, geoPointType, new Type[] {}, null, null); + addMethod("GeoPoints", "getValues", null, false, olistType, new Type[] {}, null, null); + addMethod("GeoPoints", "getLat", null, false, doubleType, new Type[] {}, null, null); + addMethod("GeoPoints", "getLon", null, false, doubleType, new Type[] {}, null, null); + addMethod("GeoPoints", "getLats", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null); + addMethod("GeoPoints", "getLons", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null); + // geo distance functions... so many... + addMethod("GeoPoints", "factorDistance", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "factorDistanceWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "factorDistance02", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "factorDistance13", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistance", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistanceWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistanceInKm", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistanceInKmWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistanceInMiles", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "arcDistanceInMilesWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distance", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distanceWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distanceInKm", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distanceInKmWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distanceInMiles", null, false, doubleType, + new Type[] { doubleType, doubleType }, null, null); + addMethod("GeoPoints", "distanceInMilesWithDefault", null, false, doubleType, + new Type[] { doubleType, doubleType, doubleType }, null, null); + addMethod("GeoPoints", "geohashDistance", null, false, doubleType, + new Type[] { stringType }, null, null); + addMethod("GeoPoints", "geohashDistanceInKm", null, false, doubleType, + new Type[] { stringType }, null, null); + addMethod("GeoPoints", "geohashDistanceInMiles", null, false, doubleType, + new Type[] { stringType }, null, null); } private void copyDefaultStructs() { @@ -1079,6 +1169,12 @@ class Definition { copyStruct("IllegalArgumentException", "Exception", "Object"); copyStruct("IllegalStateException", "Exception", "Object"); copyStruct("NumberFormatException", "Exception", "Object"); + + copyStruct("GeoPoint", "Object"); + 
copyStruct("Strings", "List", "Collection", "Object"); + copyStruct("Longs", "List", "Collection", "Object"); + copyStruct("Doubles", "List", "Collection", "Object"); + copyStruct("GeoPoints", "List", "Collection", "Object"); } private void addDefaultTransforms() { diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index d53306b5d47..a1087f17d4e 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -28,7 +28,7 @@ setup: script_fields: bar: script: - inline: "input.doc['foo'].0 + input.x;" + inline: "input.doc['foo'].value + input.x;" lang: painless params: x: "bbb" diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml index 2dd1a6004ff..4a1ec86a267 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml @@ -29,12 +29,12 @@ query: script: script: - inline: "input.doc['num1'].0 > 1;" + inline: "input.doc['num1'].value > 1;" lang: painless script_fields: sNum1: script: - inline: "input.doc['num1'].0;" + inline: "input.doc['num1'].value;" lang: painless sort: num1: @@ -51,7 +51,7 @@ query: script: script: - inline: "input.doc['num1'].0 > input.param1;" + inline: "input.doc['num1'].value > input.param1;" lang: painless params: param1: 1 @@ -59,7 +59,7 @@ script_fields: sNum1: script: - inline: "return input.doc['num1'].0;" + inline: "return input.doc['num1'].value;" lang: painless sort: num1: @@ -76,7 +76,7 @@ query: script: script: - inline: "input.doc['num1'].0 > input.param1;" + inline: "input.doc['num1'].value > input.param1;" lang: painless params: param1: -1 @@ -84,7 +84,7 @@ script_fields: sNum1: script: - inline: "input.doc['num1'].0;" + inline: "input.doc['num1'].value;" lang: painless sort: num1: From f3b2ab822d3812c6853ca031f128325711d4d71d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 5 May 2016 19:03:23 -0400 Subject: [PATCH 0089/1311] Another wait_for_yellow to the docs All in service of the snippets passing consistently. --- docs/reference/mapping/params/analyzer.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc index 5f564f9a668..89d83f2b369 100644 --- a/docs/reference/mapping/params/analyzer.asciidoc +++ b/docs/reference/mapping/params/analyzer.asciidoc @@ -60,6 +60,8 @@ PUT my_index } } +GET _cluster/health?wait_for_status=yellow + GET my_index/_analyze?field=text <3> { "text": "The quick Brown Foxes." From 2d2b232a7d7ddafc830b4e7dbf3c0398da4a50eb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 5 May 2016 16:08:51 -0700 Subject: [PATCH 0090/1311] Tests: Delay starting progress loggers for vagrant until test is running This was broken recently as part of making the vagrant tasks extend LoggedExec. This change fixes the progress logger to not be started until we start seeing output from vagrant. 
--- .../vagrant/TapLoggerOutputStream.groovy | 136 +++++++++--------- .../vagrant/VagrantLoggerOutputStream.groovy | 47 ++---- 2 files changed, 84 insertions(+), 99 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy index 6b87ceeaf96..3f980c57a49 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy @@ -19,9 +19,11 @@ package org.elasticsearch.gradle.vagrant import com.carrotsearch.gradle.junit4.LoggingOutputStream +import groovy.transform.PackageScope import org.gradle.api.GradleScriptException import org.gradle.api.logging.Logger import org.gradle.logging.ProgressLogger +import org.gradle.logging.ProgressLoggerFactory import java.util.regex.Matcher @@ -35,73 +37,77 @@ import java.util.regex.Matcher * There is a Tap4j project but we can't use it because it wants to parse the * entire TAP stream at once and won't parse it stream-wise. */ -class TapLoggerOutputStream extends LoggingOutputStream { - ProgressLogger progressLogger - Logger logger - int testsCompleted = 0 - int testsFailed = 0 - int testsSkipped = 0 - Integer testCount - String countsFormat +public class TapLoggerOutputStream extends LoggingOutputStream { + private final ProgressLogger progressLogger + private boolean isStarted = false + private final Logger logger + private int testsCompleted = 0 + private int testsFailed = 0 + private int testsSkipped = 0 + private Integer testCount + private String countsFormat - TapLoggerOutputStream(Map args) { - logger = args.logger - progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) - progressLogger.setDescription("TAP output for `$args.command`") - progressLogger.started() - progressLogger.progress("Starting `$args.command`...") - } - - void flush() { - if (end == start) return - line(new String(buffer, start, end - start)) - start = end - } - - void line(String line) { - // System.out.print "===> $line\n" - if (testCount == null) { - try { - testCount = line.split('\\.').last().toInteger() - def length = (testCount as String).length() - countsFormat = "%0${length}d" - countsFormat = "[$countsFormat|$countsFormat|$countsFormat/$countsFormat]" - return - } catch (Exception e) { - throw new GradleScriptException( - 'Error parsing first line of TAP stream!!', e) - } - } - Matcher m = line =~ /(?ok|not ok) \d+(? # skip (?\(.+\))?)? \[(?.+)\] (?.+)/ - if (!m.matches()) { - /* These might be failure report lines or comments or whatever. Its hard - to tell and it doesn't matter. 
*/ - logger.warn(line) - return - } - boolean skipped = m.group('skip') != null - boolean success = !skipped && m.group('status') == 'ok' - String skipReason = m.group('skipReason') - String suiteName = m.group('suite') - String testName = m.group('test') - - String status - if (skipped) { - status = "SKIPPED" - testsSkipped++ - } else if (success) { - status = " OK" - testsCompleted++ - } else { - status = " FAILED" - testsFailed++ + TapLoggerOutputStream(Map args) { + logger = args.logger + progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) + progressLogger.setDescription("TAP output for `${args.command}`") } - String counts = sprintf(countsFormat, - [testsCompleted, testsFailed, testsSkipped, testCount]) - progressLogger.progress("Tests $counts, $status [$suiteName] $testName") - if (!success) { - logger.warn(line) + @Override + public void flush() { + if (isStarted == false) { + progressLogger.started() + isStarted = true + } + if (end == start) return + line(new String(buffer, start, end - start)) + start = end + } + + void line(String line) { + // System.out.print "===> $line\n" + if (testCount == null) { + try { + testCount = line.split('\\.').last().toInteger() + def length = (testCount as String).length() + countsFormat = "%0${length}d" + countsFormat = "[$countsFormat|$countsFormat|$countsFormat/$countsFormat]" + return + } catch (Exception e) { + throw new GradleScriptException( + 'Error parsing first line of TAP stream!!', e) + } + } + Matcher m = line =~ /(?ok|not ok) \d+(? # skip (?\(.+\))?)? \[(?.+)\] (?.+)/ + if (!m.matches()) { + /* These might be failure report lines or comments or whatever. Its hard + to tell and it doesn't matter. */ + logger.warn(line) + return + } + boolean skipped = m.group('skip') != null + boolean success = !skipped && m.group('status') == 'ok' + String skipReason = m.group('skipReason') + String suiteName = m.group('suite') + String testName = m.group('test') + + String status + if (skipped) { + status = "SKIPPED" + testsSkipped++ + } else if (success) { + status = " OK" + testsCompleted++ + } else { + status = " FAILED" + testsFailed++ + } + + String counts = sprintf(countsFormat, + [testsCompleted, testsFailed, testsSkipped, testCount]) + progressLogger.progress("Tests $counts, $status [$suiteName] $testName") + if (!success) { + logger.warn(line) + } } - } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy index 22008fa1a0e..331a638b5ca 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle.vagrant import com.carrotsearch.gradle.junit4.LoggingOutputStream +import org.gradle.api.logging.Logger import org.gradle.logging.ProgressLogger import org.gradle.logging.ProgressLoggerFactory @@ -46,31 +47,31 @@ import org.gradle.logging.ProgressLoggerFactory public class VagrantLoggerOutputStream extends LoggingOutputStream { private static final String HEADING_PREFIX = '==> ' - ProgressLoggerFactory progressLoggerFactory - - - private ProgressLogger progressLogger - String squashedPrefix - String lastLine = '' - boolean inProgressReport = false - String heading = '' + private final ProgressLogger progressLogger + private boolean isStarted = false + private String squashedPrefix + private String 
lastLine = '' + private boolean inProgressReport = false + private String heading = '' VagrantLoggerOutputStream(Map args) { progressLogger = args.factory.newOperation(VagrantLoggerOutputStream) progressLogger.setDescription("Vagrant output for `$args.command`") - progressLogger.started() - progressLogger.progress("Starting `$args.command`...") squashedPrefix = args.squashedPrefix } - void flush() { + @Override + public void flush() { + if (isStarted == false) { + progressLogger.started() + isStarted = true + } if (end == start) return line(new String(buffer, start, end - start)) start = end } void line(String line) { - // debugPrintLine(line) // Uncomment me to log every incoming line if (line.startsWith('\r\u001b')) { /* We don't want to try to be a full terminal emulator but we want to keep the escape sequences from leaking and catch _some_ of the @@ -97,28 +98,6 @@ public class VagrantLoggerOutputStream extends LoggingOutputStream { } else { return } - // debugLogLine(line) // Uncomment me to log every line we add to the logger progressLogger.progress(line) } - - void debugPrintLine(line) { - System.out.print '----------> ' - for (int i = start; i < end; i++) { - switch (buffer[i] as char) { - case ' '..'~': - System.out.print buffer[i] as char - break - default: - System.out.print '%' - System.out.print Integer.toHexString(buffer[i]) - } - } - System.out.print '\n' - } - - void debugLogLine(line) { - System.out.print '>>>>>>>>>>> ' - System.out.print line - System.out.print '\n' - } } From f1fb6a37c0e348ef0898782d58acf989f2b5c9be Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 5 May 2016 19:15:01 -0400 Subject: [PATCH 0091/1311] Fix error message on ES_MAX_MEM set This commit fixes a typo in the error message when the now unsupported environment variable ES_MAX_MEM is set on Windows. 
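For example, with ES_MAX_MEM=2g (an arbitrary illustrative value) the corrected message reads "ES_MAX_MEM=2g: set -Xmx2g in jvm.options or add "-Xmx2g" to ES_JAVA_OPTS"; before this fix the first alternative wrongly suggested -Xms2g, i.e. a minimum rather than a maximum heap size.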
--- distribution/src/main/resources/bin/elasticsearch.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/main/resources/bin/elasticsearch.bat b/distribution/src/main/resources/bin/elasticsearch.bat index 3a9d561e2e3..9f1d871d0a2 100644 --- a/distribution/src/main/resources/bin/elasticsearch.bat +++ b/distribution/src/main/resources/bin/elasticsearch.bat @@ -19,7 +19,7 @@ if %bad_env_var% == 1 ( echo Error: encountered environment variables that are no longer supported echo Use jvm.options or ES_JAVA_OPTS to configure the JVM if not "%ES_MIN_MEM%" == "" echo ES_MIN_MEM=%ES_MIN_MEM%: set -Xms%ES_MIN_MEM% in jvm.options or add "-Xms%ES_MIN_MEM%" to ES_JAVA_OPTS - if not "%ES_MAX_MEM%" == "" echo ES_MAX_MEM=%ES_MAX_MEM%: set -Xms%ES_MAX_MEM% in jvm.options or add "-Xmx%ES_MAX_MEM%" to ES_JAVA_OPTS + if not "%ES_MAX_MEM%" == "" echo ES_MAX_MEM=%ES_MAX_MEM%: set -Xmx%ES_MAX_MEM% in jvm.options or add "-Xmx%ES_MAX_MEM%" to ES_JAVA_OPTS if not "%ES_HEAP_SIZE%" == "" echo ES_HEAP_SIZE=%ES_HEAP_SIZE%: set -Xms%ES_HEAP_SIZE% and -Xmx%ES_HEAP_SIZE% in jvm.options or add "-Xms%ES_HEAP_SIZE% -Xmx%ES_HEAP_SIZE%" to ES_JAVA_OPTS if not "%ES_HEAP_NEWSIZE%" == "" echo ES_HEAP_NEWSIZE=%ES_HEAP_NEWSIZE%: set -Xmn%ES_HEAP_NEWSIZE% in jvm.options or add "-Xmn%ES_HEAP_SIZE%" to ES_JAVA_OPTS if not "%ES_DIRECT_SIZE%" == "" echo ES_DIRECT_SIZE=%ES_DIRECT_SIZE%: set -XX:MaxDirectMemorySize=%ES_DIRECT_SIZE% in jvm.options or add "-XX:MaxDirectMemorySize=%ES_DIRECT_SIZE%" to ES_JAVA_OPTS From 443976d03c8dc9a9527b25f62e735922e842848b Mon Sep 17 00:00:00 2001 From: Radovan Ondas Date: Fri, 6 May 2016 01:17:31 +0200 Subject: [PATCH 0092/1311] Fix typo in message for variable setup ES_MAX_MEM Small typo fix in startup script. Relates #18168 --- distribution/src/main/resources/bin/elasticsearch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/main/resources/bin/elasticsearch b/distribution/src/main/resources/bin/elasticsearch index 2f62223cfe9..0101a108d4b 100755 --- a/distribution/src/main/resources/bin/elasticsearch +++ b/distribution/src/main/resources/bin/elasticsearch @@ -72,7 +72,7 @@ if test -n "$ES_MIN_MEM" || echo "Error: encountered environment variables that are no longer supported" echo "Use jvm.options or ES_JAVA_OPTS to configure the JVM" unsupported_environment_variable "$ES_MIN_MEM" ES_MIN_MEM "set -Xms$ES_MIN_MEM in jvm.options or add \"-Xms$ES_MIN_MEM\" to ES_JAVA_OPTS" - unsupported_environment_variable "$ES_MAX_MEM" ES_MAX_MEM "set -Xms$ES_MAX_MEM in jvm.options or add \"-Xms$ES_MAX_MEM\" to ES_JAVA_OPTS" + unsupported_environment_variable "$ES_MAX_MEM" ES_MAX_MEM "set -Xmx$ES_MAX_MEM in jvm.options or add \"-Xmx$ES_MAX_MEM\" to ES_JAVA_OPTS" unsupported_environment_variable "$ES_HEAP_SIZE" ES_HEAP_SIZE "set -Xms$ES_HEAP_SIZE and -Xmx$ES_HEAP_SIZE in jvm.options or add \"-Xms$ES_HEAP_SIZE -Xmx$ES_HEAP_SIZE\" to ES_JAVA_OPTS" unsupported_environment_variable "$ES_HEAP_NEWSIZE" ES_HEAP_NEWSIZE "set -Xmn$ES_HEAP_NEWSIZE in jvm.options or add \"-Xmn$ES_HEAP_SIZE\" to ES_JAVA_OPTS" unsupported_environment_variable "$ES_DIRECT_SIZE" ES_DIRECT_SIZE "set -XX:MaxDirectMemorySize=$ES_DIRECT_SIZE in jvm.options or add \"-XX:MaxDirectMemorySize=$ES_DIRECT_SIZE\" to ES_JAVA_OPTS" From 1199cd8e2a1343ba74f208c96d6798ab0da4bdfd Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 5 May 2016 20:16:57 -0400 Subject: [PATCH 0093/1311] Mark IHBT#testFromAndToXContent as awaits fix This commit marks InnerHitBuilderTests#testFromAndToXContent as
awaiting a fix. --- .../elasticsearch/index/query/InnerHitBuilderTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index d2cbec890e8..4bc95a1c3ce 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -87,6 +87,7 @@ public class InnerHitBuilderTests extends ESTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18166") public void testFromAndToXContent() throws Exception { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { InnerHitBuilder innerHit = randomInnerHits(true, false); From e16af604bf74442625d84b0b09b9cd8da67c336d Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 5 May 2016 17:53:01 -0700 Subject: [PATCH 0094/1311] Build: Add pom generation to assemble task In preparation for a unified release process, we need to be able to generate the pom files independently of trying to actually publish. This change adds back the maven-publish plugin just for that purpose. The nexus plugin still exists for now, so that we do not break snapshots, but that can be removed at a later time once snapshots are happening through the unified tools. Note I also changed the directory jars are written into so that all our artifacts are under build/distributions. --- build.gradle | 20 +++ buildSrc/build.gradle | 1 + .../elasticsearch/gradle/BuildPlugin.groovy | 116 +++++++++++------- .../gradle/plugin/PluginBuildPlugin.groovy | 32 ++++- core/build.gradle | 12 ++ distribution/build.gradle | 13 ++ distribution/integ-test-zip/build.gradle | 8 ++ distribution/tar/build.gradle | 1 + distribution/zip/build.gradle | 8 ++ test/build.gradle | 2 + 10 files changed, 171 insertions(+), 42 deletions(-) diff --git a/build.gradle b/build.gradle index ad1f2456dea..459c8c8ae2e 100644 --- a/build.gradle +++ b/build.gradle @@ -28,6 +28,26 @@ subprojects { group = 'org.elasticsearch' version = org.elasticsearch.gradle.VersionProperties.elasticsearch + // we only use maven publish to add tasks for pom generation + plugins.withType(MavenPublishPlugin).whenPluginAdded { + publishing { + publications { + // add license information to generated poms + all { + pom.withXml { XmlProvider xml -> + Node node = xml.asNode() + node.appendNode('inceptionYear', '2009') + + Node license = node.appendNode('licenses').appendNode('license') + license.appendNode('name', 'The Apache Software License, Version 2.0') + license.appendNode('url', 'http://www.apache.org/licenses/LICENSE-2.0.txt') + license.appendNode('distribution', 'repo') + } + } + } + } + } + plugins.withType(NexusPlugin).whenPluginAdded { modifyPom { project { diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index e36451311e7..1b0d3c52127 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -69,6 +69,7 @@ dependencies { transitive = false } compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
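(The configurePomGeneration change in BuildPlugin.groovy below is what pairs each generated pom with its jar: for the core project, whose archivesBaseName is 'elasticsearch', running `gradle assemble` should now leave, for example, build/distributions/elasticsearch-<version>.jar next to build/distributions/elasticsearch-<version>.pom with no publish step required; the <version> placeholder and exact file names here are illustrative.)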
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ab2ba5abfef..de5dbb73af6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -33,6 +33,9 @@ import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.artifacts.maven.MavenPom +import org.gradle.api.publish.maven.MavenPublication +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin +import org.gradle.api.publish.maven.tasks.GenerateMavenPom import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.JavaCompile import org.gradle.internal.jvm.Jvm @@ -54,7 +57,7 @@ class BuildPlugin implements Plugin { project.pluginManager.apply('java') project.pluginManager.apply('carrotsearch.randomized-testing') // these plugins add lots of info to our jars - configureJarManifest(project) // jar config must be added before info broker + configureJars(project) // jar config must be added before info broker project.pluginManager.apply('nebula.info-broker') project.pluginManager.apply('nebula.info-basic') project.pluginManager.apply('nebula.info-java') @@ -68,6 +71,7 @@ class BuildPlugin implements Plugin { configureConfigurations(project) project.ext.versions = VersionProperties.versions configureCompile(project) + configurePomGeneration(project) configureTest(project) configurePrecommit(project) @@ -266,44 +270,7 @@ class BuildPlugin implements Plugin { // add exclusions to the pom directly, for each of the transitive deps of this project's deps project.modifyPom { MavenPom pom -> - pom.withXml { XmlProvider xml -> - // first find if we have dependencies at all, and grab the node - NodeList depsNodes = xml.asNode().get('dependencies') - if (depsNodes.isEmpty()) { - return - } - - // check each dependency for any transitive deps - for (Node depNode : depsNodes.get(0).children()) { - String groupId = depNode.get('groupId').get(0).text() - String artifactId = depNode.get('artifactId').get(0).text() - String version = depNode.get('version').get(0).text() - - // collect the transitive deps now that we know what this dependency is - String depConfig = transitiveDepConfigName(groupId, artifactId, version) - Configuration configuration = project.configurations.findByName(depConfig) - if (configuration == null) { - continue // we did not make this dep non-transitive - } - Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts - if (artifacts.size() <= 1) { - // this dep has no transitive deps (or the only artifact is itself) - continue - } - - // we now know we have something to exclude, so add the exclusion elements - Node exclusions = depNode.appendNode('exclusions') - for (ResolvedArtifact transitiveArtifact : artifacts) { - ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id - if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { - continue; // don't exclude the dependency itself! 
- } - Node exclusion = exclusions.appendNode('exclusion') - exclusion.appendNode('groupId', transitiveDep.group) - exclusion.appendNode('artifactId', transitiveDep.name) - } - } - } + pom.withXml(removeTransitiveDependencies(project)) } } @@ -332,6 +299,70 @@ class BuildPlugin implements Plugin { } } + /** Returns a closure which can be used with a MavenPom for removing transitive dependencies. */ + private static Closure removeTransitiveDependencies(Project project) { + // TODO: remove this when enforcing gradle 2.13+, it now properly handles exclusions + return { XmlProvider xml -> + // first find if we have dependencies at all, and grab the node + NodeList depsNodes = xml.asNode().get('dependencies') + if (depsNodes.isEmpty()) { + return + } + + // check each dependency for any transitive deps + for (Node depNode : depsNodes.get(0).children()) { + String groupId = depNode.get('groupId').get(0).text() + String artifactId = depNode.get('artifactId').get(0).text() + String version = depNode.get('version').get(0).text() + + // collect the transitive deps now that we know what this dependency is + String depConfig = transitiveDepConfigName(groupId, artifactId, version) + Configuration configuration = project.configurations.findByName(depConfig) + if (configuration == null) { + continue // we did not make this dep non-transitive + } + Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts + if (artifacts.size() <= 1) { + // this dep has no transitive deps (or the only artifact is itself) + continue + } + + // we now know we have something to exclude, so add the exclusion elements + Node exclusions = depNode.appendNode('exclusions') + for (ResolvedArtifact transitiveArtifact : artifacts) { + ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id + if (transitiveDep.group == groupId && transitiveDep.name == artifactId) { + continue; // don't exclude the dependency itself! + } + Node exclusion = exclusions.appendNode('exclusion') + exclusion.appendNode('groupId', transitiveDep.group) + exclusion.appendNode('artifactId', transitiveDep.name) + } + } + } + } + + /**Configuration generation of maven poms. 
*/ + private static void configurePomGeneration(Project project) { + project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded { + project.publishing { + publications { + all { MavenPublication publication -> // we only deal with maven + // add exclusions to the pom directly, for each of the transitive deps of this project's deps + publication.pom.withXml(removeTransitiveDependencies(project)) + } + } + } + + project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t -> + // place the pom next to the jar it is for + t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom") + // build poms with assemble + project.assemble.dependsOn(t) + } + } + } + /** Adds compiler settings to the project */ static void configureCompile(Project project) { project.ext.compactProfile = 'compact3' @@ -364,9 +395,12 @@ class BuildPlugin implements Plugin { } } - /** Adds additional manifest info to jars */ - static void configureJarManifest(Project project) { + /** Adds additional manifest info to jars, and adds source and javadoc jars */ + static void configureJars(Project project) { project.tasks.withType(Jar) { Jar jarTask -> + // we put all our distributable files under distributions + jarTask.destinationDir = new File(project.buildDir, 'distributions') + // fixup the jar manifest jarTask.doFirst { boolean isSnapshot = VersionProperties.elasticsearch.endsWith("-SNAPSHOT"); String version = VersionProperties.elasticsearch; diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index b04f959e068..36770ab35f7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,11 +18,12 @@ */ package org.elasticsearch.gradle.plugin +import nebula.plugin.publishing.maven.MavenManifestPlugin +import nebula.plugin.publishing.maven.MavenScmPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask import org.gradle.api.Project -import org.gradle.api.artifacts.Dependency import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Zip @@ -50,6 +51,7 @@ public class PluginBuildPlugin extends BuildPlugin { } else { project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files) + configurePomGeneration(project) } project.namingConventions { @@ -125,4 +127,32 @@ public class PluginBuildPlugin extends BuildPlugin { project.configurations.getByName('default').extendsFrom = [] project.artifacts.add('default', bundle) } + + /** + * Adds the plugin jar and zip as publications. 
+ */ + private static void configurePomGeneration(Project project) { + project.plugins.apply(MavenScmPlugin.class) + project.plugins.apply(MavenManifestPlugin.class) + + project.publishing { + publications { + nebula { + artifact project.bundlePlugin + pom.withXml { + // overwrite the name/description in the pom nebula set up + Node root = asNode() + for (Node node : root.children()) { + if (node.name() == 'name') { + node.setValue(project.pluginProperties.extension.name) + } else if (node.name() == 'description') { + node.setValue(project.pluginProperties.extension.description) + } + } + } + } + } + } + + } } diff --git a/core/build.gradle b/core/build.gradle index ab3754e72ff..e12a80fcbf7 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -24,6 +24,18 @@ import org.elasticsearch.gradle.BuildPlugin apply plugin: 'elasticsearch.build' apply plugin: 'com.bmuschko.nexus' apply plugin: 'nebula.optional-base' +apply plugin: 'nebula.maven-base-publish' +apply plugin: 'nebula.maven-scm' +//apply plugin: 'nebula.source-jar' +//apply plugin: 'nebula.javadoc-jar' + +publishing { + publications { + nebula { + artifactId 'elasticsearch' + } + } +} archivesBaseName = 'elasticsearch' diff --git a/distribution/build.gradle b/distribution/build.gradle index 09050db2159..bb4cc167f10 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -158,6 +158,19 @@ subprojects { MavenFilteringHack.filter(it, expansions) } } + + /***************************************************************************** + * Publishing setup * + *****************************************************************************/ + apply plugin: 'nebula.maven-base-publish' + apply plugin: 'nebula.maven-scm' + publishing { + publications { + nebula { + artifactId 'elasticsearch' + } + } + } } /***************************************************************************** diff --git a/distribution/integ-test-zip/build.gradle b/distribution/integ-test-zip/build.gradle index 23191ff03a4..67f99aa884a 100644 --- a/distribution/integ-test-zip/build.gradle +++ b/distribution/integ-test-zip/build.gradle @@ -27,5 +27,13 @@ artifacts { archives buildZip } +publishing { + publications { + nebula { + artifact buildZip + } + } +} + integTest.dependsOn buildZip diff --git a/distribution/tar/build.gradle b/distribution/tar/build.gradle index 9edba6c11a2..72425659056 100644 --- a/distribution/tar/build.gradle +++ b/distribution/tar/build.gradle @@ -33,3 +33,4 @@ artifacts { project.signArchives.singleSignature.type = 'tar.gz.asc' } } + diff --git a/distribution/zip/build.gradle b/distribution/zip/build.gradle index 23191ff03a4..67f99aa884a 100644 --- a/distribution/zip/build.gradle +++ b/distribution/zip/build.gradle @@ -27,5 +27,13 @@ artifacts { archives buildZip } +publishing { + publications { + nebula { + artifact buildZip + } + } +} + integTest.dependsOn buildZip diff --git a/test/build.gradle b/test/build.gradle index 7feb332b717..a80ca59978c 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -25,6 +25,8 @@ subprojects { group = 'org.elasticsearch.test' apply plugin: 'elasticsearch.build' + apply plugin: 'nebula.maven-base-publish' + apply plugin: 'nebula.maven-scm' // the main files are actually test files, so use the appropriate forbidden api sigs From 7d8708716e0f92a4ea5073b76a308cd226266d36 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 4 May 2016 15:18:13 +0200 Subject: [PATCH 0095/1311] QueryBuilder does not need generics. 
#18133 QueryBuilder has generics, but those are never used: all call sites use `QueryBuilder<?>`. Only `AbstractQueryBuilder` needs generics so that the base class can contain a default implementation for setters that return `this`. --- .../query/ShardValidateQueryRequest.java | 4 +- .../validate/query/ValidateQueryRequest.java | 6 +- .../action/explain/ExplainRequest.java | 6 +- .../percolate/PercolateSourceBuilder.java | 4 +- .../percolate/TransportPercolateAction.java | 2 +- .../action/search/SearchRequestBuilder.java | 4 +- .../cluster/metadata/AliasValidator.java | 2 +- .../percolator/PercolatorFieldMapper.java | 4 +- .../index/query/AbstractQueryBuilder.java | 18 +++--- .../index/query/BoolQueryBuilder.java | 64 +++++++++---------- .../index/query/BoostingQueryBuilder.java | 8 +-- .../query/ConstantScoreQueryBuilder.java | 12 ++-- .../index/query/DisMaxQueryBuilder.java | 16 ++--- .../query/FieldMaskingSpanQueryBuilder.java | 8 +-- .../index/query/FuzzyQueryBuilder.java | 2 +- .../index/query/GeoShapeQueryBuilder.java | 2 +- .../index/query/HasChildQueryBuilder.java | 14 ++-- .../index/query/HasParentQueryBuilder.java | 14 ++-- .../index/query/IndicesQueryBuilder.java | 26 ++++---- .../index/query/InnerHitBuilder.java | 6 +- .../index/query/MoreLikeThisQueryBuilder.java | 3 +- .../index/query/MultiTermQueryBuilder.java | 2 +- .../index/query/NestedQueryBuilder.java | 6 +- .../index/query/PercolateQueryBuilder.java | 2 +- .../index/query/PrefixQueryBuilder.java | 2 +- .../index/query/QueryBuilder.java | 16 +++-- .../index/query/QueryParseContext.java | 8 +-- .../index/query/QueryParser.java | 2 +- .../index/query/RangeQueryBuilder.java | 4 +- .../index/query/RegexpQueryBuilder.java | 2 +- .../query/SpanContainingQueryBuilder.java | 24 +++---- .../index/query/SpanFirstQueryBuilder.java | 10 +-- .../query/SpanMultiTermQueryBuilder.java | 10 +-- .../index/query/SpanNearQueryBuilder.java | 16 ++--- .../index/query/SpanNotQueryBuilder.java | 12 ++-- .../index/query/SpanOrQueryBuilder.java | 16 ++--- .../index/query/SpanQueryBuilder.java | 2 +- .../index/query/SpanTermQueryBuilder.java | 2 +- .../index/query/SpanWithinQueryBuilder.java | 12 ++-- .../index/query/TemplateQueryBuilder.java | 4 +- .../index/query/TermsQueryBuilder.java | 2 +- .../index/query/WildcardQueryBuilder.java | 2 +- .../index/query/WrapperQueryBuilder.java | 4 +- .../FunctionScoreQueryBuilder.java | 26 ++++---- .../query/RestValidateQueryAction.java | 2 +- .../rest/action/cat/RestCountAction.java | 2 +- .../rest/action/count/RestCountAction.java | 2 +- .../action/explain/RestExplainAction.java | 2 +- .../rest/action/search/RestSearchAction.java | 2 +- .../rest/action/support/RestActions.java | 4 +- .../elasticsearch/search/SearchModule.java | 2 +- .../aggregations/AggregationBuilders.java | 4 +- .../filter/FilterAggregatorBuilder.java | 6 +- .../filter/FilterAggregatorFactory.java | 2 +- .../bucket/filters/FiltersAggregator.java | 6 +- .../filters/FiltersAggregatorBuilder.java | 10 +-- .../SignificantTermsAggregatorBuilder.java | 6 +- .../SignificantTermsAggregatorFactory.java | 4 +- .../significant/SignificantTermsParser.java | 4 +- .../search/builder/SearchSourceBuilder.java | 18 +++--- .../highlight/AbstractHighlighterBuilder.java | 6 +- .../search/rescore/QueryRescorerBuilder.java | 10 +-- .../search/rescore/RescoreBuilder.java | 2 +- .../search/sort/FieldSortBuilder.java | 8 +-- .../search/sort/GeoDistanceSortBuilder.java | 6 +- .../search/sort/ScriptSortBuilder.java | 8 +-- .../search/sort/SortBuilder.java | 2 +-
.../index/query/AbstractQueryTestCase.java | 34 +++++----- .../index/query/BoolQueryBuilderTests.java | 18 +++--- .../query/BoostingQueryBuilderTests.java | 2 +- .../query/ConstantScoreQueryBuilderTests.java | 2 +- .../index/query/DisMaxQueryBuilderTests.java | 4 +- .../GeoBoundingBoxQueryBuilderTests.java | 2 +- .../query/GeoShapeQueryBuilderTests.java | 2 +- .../query/HasChildQueryBuilderTests.java | 4 +- .../index/query/IndicesQueryBuilderTests.java | 2 +- .../index/query/InnerHitBuilderTests.java | 2 +- .../index/query/NestedQueryBuilderTests.java | 2 +- .../query/PercolateQueryBuilderTests.java | 2 +- .../query/QueryStringQueryBuilderTests.java | 2 +- .../index/query/RandomQueryBuilder.java | 4 +- .../index/query/RangeQueryBuilderTests.java | 8 +-- .../query/SpanMultiTermQueryBuilderTests.java | 2 +- .../query/SpanNearQueryBuilderTests.java | 2 +- .../index/query/SpanOrQueryBuilderTests.java | 4 +- .../query/TemplateQueryBuilderTests.java | 12 ++-- .../index/query/TermsQueryBuilderTests.java | 2 +- .../index/query/WrapperQueryBuilderTests.java | 8 +-- .../FunctionScoreQueryBuilderTests.java | 16 ++--- .../search/aggregations/bucket/FilterIT.java | 4 +- .../search/aggregations/bucket/FiltersIT.java | 6 +- .../aggregations/metrics/FiltersTests.java | 2 +- .../functionscore/DecayFunctionScoreIT.java | 2 +- .../search/functionscore/QueryRescorerIT.java | 2 +- .../rescore/QueryRescoreBuilderTests.java | 2 +- .../search/sort/AbstractSortTestCase.java | 2 +- .../migration/migrate_5_0/java.asciidoc | 8 +-- .../messy/tests/MinDocCountTests.java | 2 +- .../AbstractBulkByScrollRequestBuilder.java | 2 +- .../reindex/ReindexParentChildTests.java | 6 +- .../deletebyquery/DeleteByQueryRequest.java | 6 +- .../DeleteByQueryRequestBuilder.java | 2 +- .../RestDeleteByQueryAction.java | 2 +- 103 files changed, 359 insertions(+), 354 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 0aec5777974..831ef6e1060 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -34,7 +34,7 @@ import java.io.IOException; */ public class ShardValidateQueryRequest extends BroadcastShardRequest { - private QueryBuilder query; + private QueryBuilder query; private String[] types = Strings.EMPTY_ARRAY; private boolean explain; private boolean rewrite; @@ -57,7 +57,7 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { this.nowInMillis = request.nowInMillis; } - public QueryBuilder query() { + public QueryBuilder query() { return query; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 603da6bfe9d..41ef37ad621 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -39,7 +39,7 @@ import java.util.Arrays; */ public class ValidateQueryRequest extends BroadcastRequest { - private QueryBuilder query = new MatchAllQueryBuilder(); + private QueryBuilder query = new MatchAllQueryBuilder(); private boolean explain; private 
boolean rewrite; @@ -73,11 +73,11 @@ public class ValidateQueryRequest extends BroadcastRequest /** * The query to validate. */ - public QueryBuilder query() { + public QueryBuilder query() { return query; } - public ValidateQueryRequest query(QueryBuilder query) { + public ValidateQueryRequest query(QueryBuilder query) { this.query = query; return this; } diff --git a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index 6e1b6e82730..611d57345ba 100644 --- a/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/core/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -39,7 +39,7 @@ public class ExplainRequest extends SingleShardRequest { private String id; private String routing; private String preference; - private QueryBuilder query; + private QueryBuilder query; private String[] fields; private FetchSourceContext fetchSourceContext; @@ -100,11 +100,11 @@ public class ExplainRequest extends SingleShardRequest { return this; } - public QueryBuilder query() { + public QueryBuilder query() { return query; } - public ExplainRequest query(QueryBuilder query) { + public ExplainRequest query(QueryBuilder query) { this.query = query; return this; } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index 5a5924f7883..a6ee99a476c 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -48,7 +48,7 @@ import java.util.Map; public class PercolateSourceBuilder extends ToXContentToBytes { private DocBuilder docBuilder; - private QueryBuilder queryBuilder; + private QueryBuilder queryBuilder; private Integer size; private List> sorts; private Boolean trackScores; @@ -68,7 +68,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { * Sets a query to reduce the number of percolate queries to be evaluated and score the queries that match based * on this query. 
*/ - public PercolateSourceBuilder setQueryBuilder(QueryBuilder queryBuilder) { + public PercolateSourceBuilder setQueryBuilder(QueryBuilder queryBuilder) { this.queryBuilder = queryBuilder; return this; } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index 5c8b20b1f92..b80589df936 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -203,7 +203,7 @@ public class TransportPercolateAction extends HandledTransportAction queryBuilder = queryParseContext.parseInnerQueryBuilder(); + QueryBuilder queryBuilder = queryParseContext.parseInnerQueryBuilder(); BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); boolQueryBuilder.must(queryBuilder); boolQueryBuilder.filter(percolateQueryBuilder); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index fc28addc111..9830f7be203 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -166,7 +166,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder queryBuilder) { + public SearchRequestBuilder setQuery(QueryBuilder queryBuilder) { sourceBuilder().query(queryBuilder); return this; } @@ -175,7 +175,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder postFilter) { + public SearchRequestBuilder setPostFilter(QueryBuilder postFilter) { sourceBuilder().postFilter(postFilter); return this; } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index 64da04e6511..735916504da 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -141,7 +141,7 @@ public class AliasValidator extends AbstractComponent { private static void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException { QueryParseContext queryParseContext = queryShardContext.newParseContext(parser); - QueryBuilder queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext); + QueryBuilder queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext); queryBuilder.toFilter(queryShardContext); } } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 10c7e46e353..58f9f6c695c 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -186,7 +186,7 @@ public class PercolatorFieldMapper extends FieldMapper { } XContentParser parser = context.parser(); - QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation()); + QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation()); // Fetching of terms, shapes and indexed scripts happen during this rewrite: queryBuilder 
= queryBuilder.rewrite(queryShardContext); @@ -206,7 +206,7 @@ public class PercolatorFieldMapper extends FieldMapper { return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.newParseContext(parser), parser.getTokenLocation())); } - static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException { + static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException { // This means that fields in the query need to exist in the mapping prior to registering this query // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. // diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 6e82e7059d8..e883166f900 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -43,7 +43,7 @@ import java.util.Objects; * Base class for all classes producing lucene queries. * Supports conversion to BytesReference and creation of lucene Query objects. */ -public abstract class AbstractQueryBuilder> extends ToXContentToBytes implements QueryBuilder { +public abstract class AbstractQueryBuilder> extends ToXContentToBytes implements QueryBuilder { /** Default for boost to apply to resulting Lucene query. Defaults to 1.0*/ public static final float DEFAULT_BOOST = 1.0f; @@ -221,10 +221,10 @@ public abstract class AbstractQueryBuilder> * their {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the * resulting collection. 
*/ - protected static Collection toQueries(Collection> queryBuilders, QueryShardContext context) throws QueryShardException, + protected static Collection toQueries(Collection queryBuilders, QueryShardContext context) throws QueryShardException, IOException { List queries = new ArrayList<>(queryBuilders.size()); - for (QueryBuilder queryBuilder : queryBuilders) { + for (QueryBuilder queryBuilder : queryBuilders) { Query query = queryBuilder.toQuery(context); if (query != null) { queries.add(query); @@ -241,13 +241,13 @@ public abstract class AbstractQueryBuilder> protected final static void writeQueries(StreamOutput out, List queries) throws IOException { out.writeVInt(queries.size()); - for (QueryBuilder query : queries) { + for (QueryBuilder query : queries) { out.writeNamedWriteable(query); } } - protected final static List> readQueries(StreamInput in) throws IOException { - List> queries = new ArrayList<>(); + protected final static List readQueries(StreamInput in) throws IOException { + List queries = new ArrayList<>(); int size = in.readVInt(); for (int i = 0; i < size; i++) { queries.add(in.readNamedWriteable(QueryBuilder.class)); @@ -256,8 +256,8 @@ public abstract class AbstractQueryBuilder> } @Override - public final QueryBuilder rewrite(QueryRewriteContext queryShardContext) throws IOException { - QueryBuilder rewritten = doRewrite(queryShardContext); + public final QueryBuilder rewrite(QueryRewriteContext queryShardContext) throws IOException { + QueryBuilder rewritten = doRewrite(queryShardContext); if (rewritten == this) { return rewritten; } @@ -270,7 +270,7 @@ public abstract class AbstractQueryBuilder> return rewritten; } - protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { return this; } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index c6de1213a8d..e5aa774addc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -62,13 +62,13 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { private static final ParseField MINIMUM_NUMBER_SHOULD_MATCH = new ParseField("minimum_number_should_match"); private static final ParseField ADJUST_PURE_NEGATIVE = new ParseField("adjust_pure_negative"); - private final List> mustClauses = new ArrayList<>(); + private final List mustClauses = new ArrayList<>(); - private final List> mustNotClauses = new ArrayList<>(); + private final List mustNotClauses = new ArrayList<>(); - private final List> filterClauses = new ArrayList<>(); + private final List filterClauses = new ArrayList<>(); - private final List> shouldClauses = new ArrayList<>(); + private final List shouldClauses = new ArrayList<>(); private boolean disableCoord = DISABLE_COORD_DEFAULT; @@ -111,7 +111,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must appear in the matching documents and will * contribute to scoring. No null value allowed. 
*/ - public BoolQueryBuilder must(QueryBuilder queryBuilder) { + public BoolQueryBuilder must(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -122,7 +122,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must appear in the matching documents. */ - public List> must() { + public List must() { return this.mustClauses; } @@ -130,7 +130,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must appear in the matching documents but will * not contribute to scoring. No null value allowed. */ - public BoolQueryBuilder filter(QueryBuilder queryBuilder) { + public BoolQueryBuilder filter(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -141,7 +141,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must appear in the matching documents but don't contribute to scoring */ - public List> filter() { + public List filter() { return this.filterClauses; } @@ -149,7 +149,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must not appear in the matching documents. * No null value allowed. */ - public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { + public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -160,7 +160,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must not appear in the matching documents. */ - public List> mustNot() { + public List mustNot() { return this.mustNotClauses; } @@ -171,7 +171,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * * @see #minimumNumberShouldMatch(int) */ - public BoolQueryBuilder should(QueryBuilder queryBuilder) { + public BoolQueryBuilder should(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -185,7 +185,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * @see #should(QueryBuilder) * @see #minimumNumberShouldMatch(int) */ - public List> should() { + public List should() { return this.shouldClauses; } @@ -288,13 +288,13 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { builder.endObject(); } - private static void doXArrayContent(String field, List> clauses, XContentBuilder builder, Params params) + private static void doXArrayContent(String field, List clauses, XContentBuilder builder, Params params) throws IOException { if (clauses.isEmpty()) { return; } builder.startArray(field); - for (QueryBuilder clause : clauses) { + for (QueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -308,15 +308,15 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { float boost = AbstractQueryBuilder.DEFAULT_BOOST; String minimumShouldMatch = null; - final List> mustClauses = new ArrayList<>(); - final List> mustNotClauses = new ArrayList<>(); - final List> shouldClauses = new ArrayList<>(); - final List> filterClauses = new ArrayList<>(); + final List mustClauses = new ArrayList<>(); + final List mustNotClauses = new ArrayList<>(); + final List shouldClauses = new ArrayList<>(); + final List filterClauses = new ArrayList<>(); String queryName = null; String currentFieldName = null; XContentParser.Token 
token; - QueryBuilder query; + QueryBuilder query; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -387,16 +387,16 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { } } BoolQueryBuilder boolQuery = new BoolQueryBuilder(); - for (QueryBuilder queryBuilder : mustClauses) { + for (QueryBuilder queryBuilder : mustClauses) { boolQuery.must(queryBuilder); } - for (QueryBuilder queryBuilder : mustNotClauses) { + for (QueryBuilder queryBuilder : mustNotClauses) { boolQuery.mustNot(queryBuilder); } - for (QueryBuilder queryBuilder : shouldClauses) { + for (QueryBuilder queryBuilder : shouldClauses) { boolQuery.should(queryBuilder); } - for (QueryBuilder queryBuilder : filterClauses) { + for (QueryBuilder queryBuilder : filterClauses) { boolQuery.filter(queryBuilder); } boolQuery.boost(boost); @@ -436,8 +436,8 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { } private static void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, - List> clauses, Occur occurs) throws IOException { - for (QueryBuilder query : clauses) { + List clauses, Occur occurs) throws IOException { + for (QueryBuilder query : clauses) { Query luceneQuery = null; switch (occurs) { case MUST: @@ -473,7 +473,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { BoolQueryBuilder newBuilder = new BoolQueryBuilder(); boolean changed = false; final int clauses = mustClauses.size() + mustNotClauses.size() + filterClauses.size() + shouldClauses.size(); @@ -498,20 +498,20 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { @Override protected void extractInnerHitBuilders(Map innerHits) { - List> clauses = new ArrayList<>(filter()); + List clauses = new ArrayList<>(filter()); clauses.addAll(must()); clauses.addAll(should()); // no need to include must_not (since there will be no hits for it) - for (QueryBuilder clause : clauses) { + for (QueryBuilder clause : clauses) { InnerHitBuilder.extractInnerHits(clause, innerHits); } } - private static boolean rewriteClauses(QueryRewriteContext queryRewriteContext, List> builders, - Consumer> consumer) throws IOException { + private static boolean rewriteClauses(QueryRewriteContext queryRewriteContext, List builders, + Consumer consumer) throws IOException { boolean changed = false; - for (QueryBuilder builder : builders) { - QueryBuilder result = builder.rewrite(queryRewriteContext); + for (QueryBuilder builder : builders) { + QueryBuilder result = builder.rewrite(queryRewriteContext); if (result != builder) { changed = true; } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index fa439bc71d8..496cb7ec8a7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -53,9 +53,9 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder positiveQuery; + private final QueryBuilder positiveQuery; - private final QueryBuilder negativeQuery; + private final QueryBuilder negativeQuery; private float negativeBoost = -1; @@ -66,7 +66,7 @@ public class BoostingQueryBuilder 
extends AbstractQueryBuilder positiveQuery, QueryBuilder negativeQuery) { + public BoostingQueryBuilder(QueryBuilder positiveQuery, QueryBuilder negativeQuery) { if (positiveQuery == null) { throw new IllegalArgumentException("inner clause [positive] cannot be null."); } @@ -226,7 +226,7 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { QueryBuilder positiveQuery = this.positiveQuery.rewrite(queryRewriteContext); QueryBuilder negativeQuery = this.negativeQuery.rewrite(queryRewriteContext); if (positiveQuery != this.positiveQuery || negativeQuery != this.negativeQuery) { diff --git a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java index 8ae2a088f2f..306a4568576 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java @@ -43,7 +43,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder filterBuilder; + private final QueryBuilder filterBuilder; /** * A query that wraps another query and simply returns a constant score equal to the @@ -51,7 +51,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder filterBuilder) { + public ConstantScoreQueryBuilder(QueryBuilder filterBuilder) { if (filterBuilder == null) { throw new IllegalArgumentException("inner clause [filter] cannot be null."); } @@ -74,7 +74,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder innerQuery() { + public QueryBuilder innerQuery() { return this.filterBuilder; } @@ -90,7 +90,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder query = null; + QueryBuilder query = null; boolean queryFound = false; String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; @@ -163,8 +163,8 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - QueryBuilder rewrite = filterBuilder.rewrite(queryRewriteContext); + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryBuilder rewrite = filterBuilder.rewrite(queryRewriteContext); if (rewrite != filterBuilder) { return new ConstantScoreQueryBuilder(rewrite); } diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index 04662fbe863..f38b2c09b16 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -50,7 +50,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder private static final ParseField TIE_BREAKER_FIELD = new ParseField("tie_breaker"); private static final ParseField QUERIES_FIELD = new ParseField("queries"); - private final List> queries = new ArrayList<>(); + private final List queries = new ArrayList<>(); private float tieBreaker = DEFAULT_TIE_BREAKER; @@ -75,7 +75,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder /** * Add a sub-query to this disjunction. 
*/ - public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { + public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner dismax query clause cannot be null"); } @@ -86,7 +86,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder /** * @return an immutable list copy of the current sub-queries of this disjunction */ - public List> innerQueries() { + public List innerQueries() { return this.queries; } @@ -114,7 +114,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder builder.startObject(NAME); builder.field(TIE_BREAKER_FIELD.getPreferredName(), tieBreaker); builder.startArray(QUERIES_FIELD.getPreferredName()); - for (QueryBuilder queryBuilder : queries) { + for (QueryBuilder queryBuilder : queries) { queryBuilder.toXContent(builder, params); } builder.endArray(); @@ -128,7 +128,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder float boost = AbstractQueryBuilder.DEFAULT_BOOST; float tieBreaker = DisMaxQueryBuilder.DEFAULT_TIE_BREAKER; - final List> queries = new ArrayList<>(); + final List queries = new ArrayList<>(); boolean queriesFound = false; String queryName = null; @@ -140,7 +140,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder } else if (token == XContentParser.Token.START_OBJECT) { if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) { queriesFound = true; - QueryBuilder query = parseContext.parseInnerQueryBuilder(); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); queries.add(query); } else { throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]"); @@ -149,7 +149,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) { queriesFound = true; while (token != XContentParser.Token.END_ARRAY) { - QueryBuilder query = parseContext.parseInnerQueryBuilder(); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); queries.add(query); token = parser.nextToken(); } @@ -177,7 +177,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder disMaxQuery.tieBreaker(tieBreaker); disMaxQuery.queryName(queryName); disMaxQuery.boost(boost); - for (QueryBuilder query : queries) { + for (QueryBuilder query : queries) { disMaxQuery.add(query); } return disMaxQuery; diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java index e44f2868040..0382f353cb3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -35,7 +35,7 @@ import java.io.IOException; import java.util.Objects; public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder - implements SpanQueryBuilder{ + implements SpanQueryBuilder { public static final String NAME = "field_masking_span"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -43,7 +43,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder queryBuilder; + private final SpanQueryBuilder queryBuilder; private final String fieldName; @@ -53,7 +53,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder queryBuilder, String fieldName) { + public FieldMaskingSpanQueryBuilder(SpanQueryBuilder queryBuilder, String fieldName) { if 
(Strings.isEmpty(fieldName)) { throw new IllegalArgumentException("field name is null or empty"); } @@ -69,7 +69,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + queryBuilder = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); fieldName = in.readString(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 7604ef94eb6..70b4f5eb7f5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -45,7 +45,7 @@ import java.util.Objects; * a match query with the fuzziness parameter for strings or range queries for numeric and date fields. */ @Deprecated -public class FuzzyQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { +public class FuzzyQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final String NAME = "fuzzy"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index d3c47d5bc70..8b3c8a62486 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -587,7 +587,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { if (this.shape == null) { GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); ShapeBuilder shape = fetch(queryShardContext.getClient(), getRequest, indexedShapePath); diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index 61466ab5ea5..990b5a35fd9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -77,7 +77,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder query; + private final QueryBuilder query; private final String type; private final ScoreMode scoreMode; private InnerHitBuilder innerHitBuilder; @@ -85,11 +85,11 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder query, ScoreMode scoreMode) { + public HasChildQueryBuilder(String type, QueryBuilder query, ScoreMode scoreMode) { this(type, query, DEFAULT_MIN_CHILDREN, DEFAULT_MAX_CHILDREN, scoreMode, null); } - private HasChildQueryBuilder(String type, QueryBuilder query, int minChildren, int maxChildren, ScoreMode scoreMode, + private HasChildQueryBuilder(String type, QueryBuilder query, int minChildren, int maxChildren, ScoreMode scoreMode, InnerHitBuilder innerHitBuilder) { this.type = requireValue(type, "[" + NAME + "] requires 'type' field"); this.query = requireValue(query, "[" + NAME + "] requires 'query' field"); @@ -158,7 +158,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder query() { + public QueryBuilder query() { return query; } @@ -238,7 +238,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder iqb = null; + QueryBuilder iqb = null; while 
((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -467,8 +467,8 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - QueryBuilder rewrite = query.rewrite(queryRewriteContext); + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryBuilder rewrite = query.rewrite(queryRewriteContext); if (rewrite != query) { return new HasChildQueryBuilder(type, rewrite, minChildren, minChildren, scoreMode, innerHitBuilder); } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java index dc3b4d9c66a..96356c276e7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java @@ -60,17 +60,17 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder query; + private final QueryBuilder query; private final String type; private final boolean score; private InnerHitBuilder innerHit; private boolean ignoreUnmapped = false; - public HasParentQueryBuilder(String type, QueryBuilder query, boolean score) { + public HasParentQueryBuilder(String type, QueryBuilder query, boolean score) { this(type, query, score, null); } - private HasParentQueryBuilder(String type, QueryBuilder query, boolean score, InnerHitBuilder innerHit) { + private HasParentQueryBuilder(String type, QueryBuilder query, boolean score, InnerHitBuilder innerHit) { this.type = requireValue(type, "[" + NAME + "] requires 'type' field"); this.query = requireValue(query, "[" + NAME + "] requires 'query' field"); this.score = score; @@ -101,7 +101,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder query() { + public QueryBuilder query() { return query; } @@ -238,7 +238,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder iqb = null; + QueryBuilder iqb = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -306,8 +306,8 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { - QueryBuilder rewrite = query.rewrite(queryShardContext); + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + QueryBuilder rewrite = query.rewrite(queryShardContext); if (rewrite != query) { return new HasParentQueryBuilder(type, rewrite, score, innerHit); } diff --git a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java index 56fdeb779fa..7cfdf1baa1e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java @@ -55,17 +55,17 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder innerQuery; + private final QueryBuilder innerQuery; private final String[] indices; - private QueryBuilder noMatchQuery = defaultNoMatchQuery(); + private QueryBuilder noMatchQuery = defaultNoMatchQuery(); /** * @deprecated instead search on the `_index` field */ @Deprecated - public IndicesQueryBuilder(QueryBuilder innerQuery, String... 
indices) { + public IndicesQueryBuilder(QueryBuilder innerQuery, String... indices) { DEPRECATION_LOGGER.deprecated("{} query is deprecated. Instead search on the '_index' field", NAME); if (innerQuery == null) { throw new IllegalArgumentException("inner query cannot be null"); @@ -94,7 +94,7 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder innerQuery() { + public QueryBuilder innerQuery() { return this.innerQuery; } @@ -105,7 +105,7 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder noMatchQuery) { + public IndicesQueryBuilder noMatchQuery(QueryBuilder noMatchQuery) { if (noMatchQuery == null) { throw new IllegalArgumentException("noMatch query cannot be null"); } @@ -121,11 +121,11 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder noMatchQuery() { + public QueryBuilder noMatchQuery() { return this.noMatchQuery; } - private static QueryBuilder defaultNoMatchQuery() { + private static QueryBuilder defaultNoMatchQuery() { return QueryBuilders.matchAllQuery(); } @@ -144,9 +144,9 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder innerQuery = null; + QueryBuilder innerQuery = null; Collection indices = new ArrayList<>(); - QueryBuilder noMatchQuery = defaultNoMatchQuery(); + QueryBuilder noMatchQuery = defaultNoMatchQuery(); String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; @@ -209,7 +209,7 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder parseNoMatchQuery(String type) { + static QueryBuilder parseNoMatchQuery(String type) { if ("all".equals(type)) { return QueryBuilders.matchAllQuery(); } else if ("none".equals(type)) { @@ -244,9 +244,9 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { - QueryBuilder newInnnerQuery = innerQuery.rewrite(queryShardContext); - QueryBuilder newNoMatchQuery = noMatchQuery.rewrite(queryShardContext); + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + QueryBuilder newInnnerQuery = innerQuery.rewrite(queryShardContext); + QueryBuilder newNoMatchQuery = noMatchQuery.rewrite(queryShardContext); if (newInnnerQuery != innerQuery || newNoMatchQuery != noMatchQuery) { return new IndicesQueryBuilder(innerQuery, indices).noMatchQuery(noMatchQuery); } diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 61663713c8e..c07c75532f1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -131,7 +131,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl private boolean trackScores; private List fieldNames; - private QueryBuilder query = new MatchAllQueryBuilder(); + private QueryBuilder query = new MatchAllQueryBuilder(); private List> sorts; private List fieldDataFields; private Set scriptFields; @@ -411,7 +411,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl return this; } - QueryBuilder getQuery() { + QueryBuilder getQuery() { return query; } @@ -632,7 +632,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl return PARSER.parse(context.parser(), new InnerHitBuilder(), context); } - public static void extractInnerHits(QueryBuilder query, Map innerHitBuilders) { + public static void extractInnerHits(QueryBuilder query, Map innerHitBuilders) { 
if (query instanceof AbstractQueryBuilder) { ((AbstractQueryBuilder) query).extractInnerHitBuilders(innerHitBuilders); } else { diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index c71db8c22a5..66f623cbbb3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -57,7 +57,6 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.core.TextFieldMapper.TextFieldType; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -1197,7 +1196,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { // TODO this needs heavy cleanups before we can rewrite it return this; } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java index 0e946d628a1..be9abfc5e44 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java @@ -18,6 +18,6 @@ */ package org.elasticsearch.index.query; -public interface MultiTermQueryBuilder> extends QueryBuilder { +public interface MultiTermQueryBuilder extends QueryBuilder { } diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index bba0365d0ae..5d74b540116 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -57,7 +57,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder private final String path; private final ScoreMode scoreMode; - private final QueryBuilder query; + private final QueryBuilder query; private InnerHitBuilder innerHitBuilder; private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; @@ -161,7 +161,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder float boost = AbstractQueryBuilder.DEFAULT_BOOST; ScoreMode scoreMode = ScoreMode.Avg; String queryName = null; - QueryBuilder query = null; + QueryBuilder query = null; String path = null; String currentFieldName = null; InnerHitBuilder innerHitBuilder = null; @@ -259,7 +259,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { QueryBuilder rewrite = query.rewrite(queryRewriteContext); if (rewrite != query) { return new NestedQueryBuilder(path, rewrite, scoreMode, innerHitBuilder); diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java index 257460964d3..0e4b999dc33 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java @@ -327,7 +327,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { if (document != null) { return this; } diff --git a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index ffb4179aa3f..44677d1840d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -40,7 +40,7 @@ import java.util.Objects; /** * A Query that matches documents containing terms with a specified prefix. */ -public class PrefixQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { +public class PrefixQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final String NAME = "prefix"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java index dcedd0f1d5e..197af655d54 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import java.io.IOException; -public interface QueryBuilder> extends NamedWriteable, ToXContent { +public interface QueryBuilder extends NamedWriteable, ToXContent { /** * Converts this QueryBuilder to a lucene {@link Query}. @@ -49,8 +49,11 @@ public interface QueryBuilder> extends NamedWriteabl /** * Sets the arbitrary name to be assigned to the query (see named queries). + * Implementers should return the concrete type of the + * {@link QueryBuilder} so that calls can be chained. This is done + * automatically when extending {@link AbstractQueryBuilder}. */ - QB queryName(String queryName); + QueryBuilder queryName(String queryName); /** * Returns the arbitrary name assigned to the query (see named queries). @@ -65,8 +68,11 @@ public interface QueryBuilder> extends NamedWriteabl /** * Sets the boost for this query. Documents matching this query will (in addition to the normal * weightings) have their score multiplied by the boost provided. + * Implementers should return the concrete type of the + * {@link QueryBuilder} so that calls can be chained. This is done + * automatically when extending {@link AbstractQueryBuilder}. */ - QB boost(float boost); + QueryBuilder boost(float boost); /** * Returns the name that identifies uniquely the query @@ -77,7 +83,7 @@ public interface QueryBuilder> extends NamedWriteabl * Rewrites this query builder into its primitive form. By default this method return the builder itself. If the builder * did not change the identity reference must be returned otherwise the builder will be rewritten infinitely. */ - default QueryBuilder rewrite(QueryRewriteContext queryShardContext) throws IOException { + default QueryBuilder rewrite(QueryRewriteContext queryShardContext) throws IOException { return this; } @@ -87,7 +93,7 @@ public interface QueryBuilder> extends NamedWriteabl * rewrites the query until it doesn't change anymore. 
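* <p>(Illustrative note, added here: the helper loops until it reaches a fixed point,
* i.e. {@code rewriteQuery(b, ctx).rewrite(ctx)} returns the very same instance.)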
* @throws IOException if an {@link IOException} occurs */ - static QueryBuilder rewriteQuery(QueryBuilder original, QueryRewriteContext context) throws IOException { + static QueryBuilder rewriteQuery(QueryBuilder original, QueryRewriteContext context) throws IOException { QueryBuilder builder = original; for (QueryBuilder rewrittenBuilder = builder.rewrite(context); rewrittenBuilder != builder; rewrittenBuilder = builder.rewrite(context)) { diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index 1199330ee4f..62662914fd8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -55,9 +55,9 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { /** * Parses a top level query including the query element that wraps it */ - public QueryBuilder parseTopLevelQueryBuilder() { + public QueryBuilder parseTopLevelQueryBuilder() { try { - QueryBuilder queryBuilder = null; + QueryBuilder queryBuilder = null; for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { if (token == XContentParser.Token.FIELD_NAME) { String fieldName = parser.currentName(); @@ -82,7 +82,7 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { /** * Parses a query excluding the query element that wraps it */ - public QueryBuilder parseInnerQueryBuilder() throws IOException { + public QueryBuilder parseInnerQueryBuilder() throws IOException { // move to START object XContentParser.Token token; if (parser.currentToken() != XContentParser.Token.START_OBJECT) { @@ -105,7 +105,7 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) { throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object"); } - QueryBuilder result = indicesQueriesRegistry.lookup(queryName, parseFieldMatcher, parser.getTokenLocation()).fromXContent(this); + QueryBuilder result = indicesQueriesRegistry.lookup(queryName, parseFieldMatcher, parser.getTokenLocation()).fromXContent(this); if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) { // if we are at END_OBJECT, move to the next one... parser.nextToken(); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryParser.java index 6f27b8e0647..069dc86cf8a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParser.java @@ -25,7 +25,7 @@ import java.io.IOException; * Defines a query parser that is able to parse {@link QueryBuilder}s from {@link org.elasticsearch.common.xcontent.XContent}. 
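* <p>(Illustration, assuming such a static factory exists: since this is a
* {@code @FunctionalInterface}, a parser can be bound as a method reference,
* for example {@code QueryParser parser = MatchAllQueryBuilder::fromXContent;}.)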
*/ @FunctionalInterface -public interface QueryParser> { +public interface QueryParser { /** * Creates a new {@link QueryBuilder} from the query held by the {@link QueryParseContext} * in {@link org.elasticsearch.common.xcontent.XContent} format diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 5890e2bdffe..241c38475ab 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -46,7 +46,7 @@ import java.util.Objects; /** * A Query that matches documents within an range of terms. */ -public class RangeQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { +public class RangeQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final String NAME = "range"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -410,7 +410,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { final MappedFieldType.Relation relation = getRelation(queryRewriteContext); switch (relation) { case DISJOINT: diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index c895830d5a5..703b2463b11 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -41,7 +41,7 @@ import java.util.Objects; /** * A Query that does fuzzy matching for a specific value. */ -public class RegexpQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { +public class RegexpQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { public static final String NAME = "regexp"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java index d923b219897..ae4297e431d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java @@ -36,7 +36,7 @@ import java.util.Objects; * Builder for {@link org.apache.lucene.search.spans.SpanContainingQuery}. */ public class SpanContainingQueryBuilder extends AbstractQueryBuilder - implements SpanQueryBuilder { + implements SpanQueryBuilder { public static final String NAME = "span_containing"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -44,14 +44,14 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder big; - private final SpanQueryBuilder little; + private final SpanQueryBuilder big; + private final SpanQueryBuilder little; /** * @param big the big clause, it must enclose {@code little} for a match. * @param little the little clause, it must be contained within {@code big} for a match. 
*/ - public SpanContainingQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { + public SpanContainingQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { if (big == null) { throw new IllegalArgumentException("inner clause [big] cannot be null."); } @@ -67,8 +67,8 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); - little = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + big = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + little = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); } @Override @@ -106,8 +106,8 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder big = null; - SpanQueryBuilder little = null; + SpanQueryBuilder big = null; + SpanQueryBuilder little = null; String currentFieldName = null; XContentParser.Token token; @@ -117,16 +117,16 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder)) { + if (!(query instanceof SpanQueryBuilder)) { throw new ParsingException(parser.getTokenLocation(), "span_containing [big] must be of type span query"); } - big = (SpanQueryBuilder) query; + big = (SpanQueryBuilder) query; } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) { QueryBuilder query = parseContext.parseInnerQueryBuilder(); - if (!(query instanceof SpanQueryBuilder)) { + if (!(query instanceof SpanQueryBuilder)) { throw new ParsingException(parser.getTokenLocation(), "span_containing [little] must be of type span query"); } - little = (SpanQueryBuilder) query; + little = (SpanQueryBuilder) query; } else { throw new ParsingException(parser.getTokenLocation(), "[span_containing] query does not support [" + currentFieldName + "]"); diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java index 58e80af977f..009f8a39652 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -public class SpanFirstQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder{ +public class SpanFirstQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_first"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -40,7 +40,7 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder matchBuilder; + private final SpanQueryBuilder matchBuilder; private final int end; @@ -51,7 +51,7 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilderend positions */ - public SpanFirstQueryBuilder(SpanQueryBuilder matchBuilder, int end) { + public SpanFirstQueryBuilder(SpanQueryBuilder matchBuilder, int end) { if (matchBuilder == null) { throw new IllegalArgumentException("inner span query cannot be null"); } @@ -67,7 +67,7 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + matchBuilder = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); end = in.readInt(); } @@ -80,7 +80,7 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder innerQuery() { + public SpanQueryBuilder innerQuery() { return this.matchBuilder; } diff --git 
a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 46f639f5f70..86418903f70 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -39,16 +39,16 @@ import java.util.Objects; * as a {@link SpanQueryBuilder} so it can be nested. */ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder - implements SpanQueryBuilder { + implements SpanQueryBuilder { public static final String NAME = "span_multi"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); private static final ParseField MATCH_FIELD = new ParseField("match"); - private final MultiTermQueryBuilder multiTermQueryBuilder; + private final MultiTermQueryBuilder multiTermQueryBuilder; - public SpanMultiTermQueryBuilder(MultiTermQueryBuilder multiTermQueryBuilder) { + public SpanMultiTermQueryBuilder(MultiTermQueryBuilder multiTermQueryBuilder) { if (multiTermQueryBuilder == null) { throw new IllegalArgumentException("inner multi term query cannot be null"); } @@ -60,7 +60,7 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + multiTermQueryBuilder = (MultiTermQueryBuilder) in.readNamedWriteable(QueryBuilder.class); } @Override @@ -68,7 +68,7 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder innerQuery() { + public MultiTermQueryBuilder innerQuery() { return this.multiTermQueryBuilder; } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index 1bd8e85b902..a503b708633 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -39,7 +39,7 @@ import java.util.Objects; * of intervening unmatched positions, as well as whether matches are required to be in-order. * The span near query maps to Lucene {@link SpanNearQuery}. 
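With the self-referential type parameters gone, span clauses compose as plain SpanQueryBuilder references. A minimal sketch of building a span_near query, using the constructor and clause() method visible in this diff (my example, not part of the patch; the field and term values are made up, and the inOrder setter is assumed to be the fluent one on this builder):

    import org.elasticsearch.index.query.SpanNearQueryBuilder;
    import org.elasticsearch.index.query.SpanQueryBuilder;
    import org.elasticsearch.index.query.SpanTermQueryBuilder;

    public class SpanNearExample {
        public static void main(String[] args) {
            // Before this patch these locals would have needed wildcards (SpanQueryBuilder<?>).
            SpanQueryBuilder quick = new SpanTermQueryBuilder("body", "quick");
            SpanQueryBuilder fox = new SpanTermQueryBuilder("body", "fox");
            // "quick" within two positions of "fox", in order; slop is the second argument.
            SpanNearQueryBuilder near = new SpanNearQueryBuilder(quick, 2)
                    .clause(fox)
                    .inOrder(true);
            System.out.println(near); // AbstractQueryBuilder.toString() renders the JSON form
        }
    }
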
*/ -public class SpanNearQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { +public class SpanNearQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_near"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -52,7 +52,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder> clauses = new ArrayList<>(); + private final List clauses = new ArrayList<>(); private final int slop; @@ -62,7 +62,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder initialClause, int slop) { + public SpanNearQueryBuilder(SpanQueryBuilder initialClause, int slop) { if (initialClause == null) { throw new IllegalArgumentException("query must include at least one clause"); } @@ -75,8 +75,8 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause : readQueries(in)) { - this.clauses.add((SpanQueryBuilder) clause); + for (QueryBuilder clause : readQueries(in)) { + this.clauses.add((SpanQueryBuilder) clause); } slop = in.readVInt(); inOrder = in.readBoolean(); @@ -96,7 +96,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause) { + public SpanNearQueryBuilder clause(SpanQueryBuilder clause) { if (clause == null) { throw new IllegalArgumentException("query clauses cannot be null"); } @@ -107,7 +107,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder> clauses() { + public List clauses() { return this.clauses; } @@ -132,7 +132,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause : clauses) { + for (SpanQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java index aa2388dd89a..02ce431de1e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -public class SpanNotQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { +public class SpanNotQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_not"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -48,9 +48,9 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder include; + private final SpanQueryBuilder include; - private final SpanQueryBuilder exclude; + private final SpanQueryBuilder exclude; private int pre = DEFAULT_PRE; @@ -62,7 +62,7 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder include, SpanQueryBuilder exclude) { + public SpanNotQueryBuilder(SpanQueryBuilder include, SpanQueryBuilder exclude) { if (include == null) { throw new IllegalArgumentException("inner clause [include] cannot be null."); } @@ -78,8 +78,8 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); - exclude = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + include = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + exclude = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); pre = in.readVInt(); post = in.readVInt(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java 
b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java
index 71b6b74b828..2a67a84be81 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java
@@ -37,16 +37,16 @@ import java.util.Objects;
 /**
  * Span query that matches the union of its clauses. Maps to {@link SpanOrQuery}.
  */
-public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder> implements SpanQueryBuilder<SpanOrQueryBuilder> {
+public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder> implements SpanQueryBuilder {

     public static final String NAME = "span_or";
     public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
     private static final ParseField CLAUSES_FIELD = new ParseField("clauses");

-    private final List<SpanQueryBuilder<?>> clauses = new ArrayList<>();
+    private final List<SpanQueryBuilder> clauses = new ArrayList<>();

-    public SpanOrQueryBuilder(SpanQueryBuilder<?> initialClause) {
+    public SpanOrQueryBuilder(SpanQueryBuilder initialClause) {
         if (initialClause == null) {
             throw new IllegalArgumentException("query must include at least one clause");
         }
@@ -58,8 +58,8 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
      */
     public SpanOrQueryBuilder(StreamInput in) throws IOException {
         super(in);
-        for (QueryBuilder<?> clause: readQueries(in)) {
-            clauses.add((SpanQueryBuilder<?>) clause);
+        for (QueryBuilder clause: readQueries(in)) {
+            clauses.add((SpanQueryBuilder) clause);
         }
     }
@@ -68,7 +68,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
         writeQueries(out, clauses);
     }

-    public SpanOrQueryBuilder clause(SpanQueryBuilder<?> clause) {
+    public SpanOrQueryBuilder clause(SpanQueryBuilder clause) {
         if (clause == null) {
             throw new IllegalArgumentException("inner bool query clause cannot be null");
         }
@@ -79,7 +79,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
     /**
      * @return the {@link SpanQueryBuilder} clauses that were set for this query
      */
-    public List<SpanQueryBuilder<?>> clauses() {
+    public List<SpanQueryBuilder> clauses() {
         return this.clauses;
     }
@@ -87,7 +87,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(NAME);
         builder.startArray(CLAUSES_FIELD.getPreferredName());
-        for (SpanQueryBuilder<?> clause : clauses) {
+        for (SpanQueryBuilder clause : clauses) {
             clause.toXContent(builder, params);
         }
         builder.endArray();
diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java
index 90a75a5af1b..fec1cac2696 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java
@@ -22,6 +22,6 @@ package org.elasticsearch.index.query;
 /**
  * Marker interface for a specific type of {@link QueryBuilder} that allows to build span queries
  */
-public interface SpanQueryBuilder<QB extends SpanQueryBuilder<QB>> extends QueryBuilder<QB> {
+public interface SpanQueryBuilder extends QueryBuilder {

 }
diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java
index a4bafc5001c..3bb374ff276 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java
@@ -36,7 +36,7 @@ import java.io.IOException;
 * A Span Query that matches documents containing a term.
* @see SpanTermQuery */ -public class SpanTermQueryBuilder extends BaseTermQueryBuilder implements SpanQueryBuilder { +public class SpanTermQueryBuilder extends BaseTermQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_term"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java index cf430881ab9..eaedc80bab4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java @@ -36,7 +36,7 @@ import java.util.Objects; * Builder for {@link org.apache.lucene.search.spans.SpanWithinQuery}. */ public class SpanWithinQueryBuilder extends AbstractQueryBuilder - implements SpanQueryBuilder { + implements SpanQueryBuilder { public static final String NAME = "span_within"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); @@ -44,15 +44,15 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder big; - private final SpanQueryBuilder little; + private final SpanQueryBuilder big; + private final SpanQueryBuilder little; /** * Query that returns spans from little that are contained in a spans from big. * @param big clause that must enclose {@code little} for a match. * @param little the little clause, it must be contained within {@code big} for a match. */ - public SpanWithinQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { + public SpanWithinQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { if (big == null) { throw new IllegalArgumentException("inner clause [big] cannot be null."); } @@ -68,8 +68,8 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder) in.readNamedWriteable(QueryBuilder.class); - little = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + big = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); + little = (SpanQueryBuilder) in.readNamedWriteable(QueryBuilder.class); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java index 80b3f2b1404..1a4d05374b3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java @@ -173,13 +173,13 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { ExecutableScript executable = queryRewriteContext.getScriptService().executable(template, ScriptContext.Standard.SEARCH, Collections.emptyMap(), queryRewriteContext.getClusterState()); BytesReference querySource = (BytesReference) executable.run(); try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { final QueryParseContext queryParseContext = queryRewriteContext.newParseContext(qSourceParser); - final QueryBuilder queryBuilder = queryParseContext.parseInnerQueryBuilder(); + final QueryBuilder queryBuilder = queryParseContext.parseInnerQueryBuilder(); if (boost() != DEFAULT_BOOST || queryName() != null) { final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolQueryBuilder.must(queryBuilder); diff --git 
a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 084f2e766d4..5b0650eb7e4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -381,7 +381,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { } @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { if (this.termsLookup != null) { TermsLookup termsLookup = new TermsLookup(this.termsLookup); if (termsLookup.index() == null) { // TODO this should go away? diff --git a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index bf6921e9452..ef49472b8cc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -46,7 +46,7 @@ import java.util.Objects; * ?. */ public class WildcardQueryBuilder extends AbstractQueryBuilder - implements MultiTermQueryBuilder { + implements MultiTermQueryBuilder { public static final String NAME = "wildcard"; public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); diff --git a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java index 4af7081f308..4037666393d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java @@ -160,11 +160,11 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext context) throws IOException { + protected QueryBuilder doRewrite(QueryRewriteContext context) throws IOException { try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) { QueryParseContext parseContext = context.newParseContext(qSourceParser); - final QueryBuilder queryBuilder = parseContext.parseInnerQueryBuilder(); + final QueryBuilder queryBuilder = parseContext.parseInnerQueryBuilder(); if (boost() != DEFAULT_BOOST || queryName() != null) { final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolQueryBuilder.must(queryBuilder); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index 0dffa6d714a..98f878d1c2e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -75,7 +75,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query; + private final QueryBuilder query; private float maxBoost = FunctionScoreQuery.DEFAULT_MAX_BOOST; @@ -92,7 +92,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query) { + public FunctionScoreQueryBuilder(QueryBuilder query) { this(query, new FilterFunctionBuilder[0]); } @@ -120,7 +120,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query, ScoreFunctionBuilder scoreFunctionBuilder) { + public FunctionScoreQueryBuilder(QueryBuilder query, 
ScoreFunctionBuilder scoreFunctionBuilder) { this(query, new FilterFunctionBuilder[]{new FilterFunctionBuilder(scoreFunctionBuilder)}); } @@ -130,7 +130,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query, FilterFunctionBuilder[] filterFunctionBuilders) { + public FunctionScoreQueryBuilder(QueryBuilder query, FilterFunctionBuilder[] filterFunctionBuilders) { if (query == null) { throw new IllegalArgumentException("function_score: query must not be null"); } @@ -172,7 +172,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query() { + public QueryBuilder query() { return this.query; } @@ -334,14 +334,14 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder filter; + private final QueryBuilder filter; private final ScoreFunctionBuilder scoreFunction; public FilterFunctionBuilder(ScoreFunctionBuilder scoreFunctionBuilder) { this(new MatchAllQueryBuilder(), scoreFunctionBuilder); } - public FilterFunctionBuilder(QueryBuilder filter, ScoreFunctionBuilder scoreFunction) { + public FilterFunctionBuilder(QueryBuilder filter, ScoreFunctionBuilder scoreFunction) { if (filter == null) { throw new IllegalArgumentException("function_score: filter must not be null"); } @@ -366,7 +366,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder getFilter() { + public QueryBuilder getFilter() { return filter; } @@ -403,7 +403,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder rewrite = filter.rewrite(context); + QueryBuilder rewrite = filter.rewrite(context); if (rewrite != filter) { return new FilterFunctionBuilder(rewrite, scoreFunction); } @@ -412,8 +412,8 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - QueryBuilder queryBuilder = this.query.rewrite(queryRewriteContext); + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryBuilder queryBuilder = this.query.rewrite(queryRewriteContext); FilterFunctionBuilder[] rewrittenBuilders = new FilterFunctionBuilder[this.filterFunctionBuilders.length]; boolean rewritten = false; for (int i = 0; i < rewrittenBuilders.length; i++) { @@ -442,7 +442,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder query = null; + QueryBuilder query = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; @@ -571,7 +571,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder filter = null; + QueryBuilder filter = null; ScoreFunctionBuilder scoreFunction = null; Float functionWeight = null; if (token != XContentParser.Token.START_OBJECT) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java index 4ff7234b2ef..6ac71708e32 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java @@ -82,7 +82,7 @@ public class RestValidateQueryAction extends BaseRestHandler { return; } } else { - QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); + QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); if (queryBuilder != null) { validateQueryRequest.query(queryBuilder); } diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index 6fd64430c20..46e8fadd05a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -68,7 +68,7 @@ public class RestCountAction extends AbstractCatAction { if (source != null) { searchSourceBuilder.query(RestActions.getQueryContent(new BytesArray(source), indicesQueriesRegistry, parseFieldMatcher)); } else { - QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); + QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); if (queryBuilder != null) { searchSourceBuilder.query(queryBuilder); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java index c423f7a8537..71e5832071c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java @@ -74,7 +74,7 @@ public class RestCountAction extends BaseRestHandler { BytesReference restContent = RestActions.getRestContent(request); searchSourceBuilder.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher)); } else { - QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); + QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); if (queryBuilder != null) { searchSourceBuilder.query(queryBuilder); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index a1fdc13437c..47f5e7da19b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -74,7 +74,7 @@ public class RestExplainAction extends BaseRestHandler { BytesReference restContent = RestActions.getRestContent(request); explainRequest.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher)); } else if (queryString != null) { - QueryBuilder query = RestActions.urlParamsToQueryBuilder(request); + QueryBuilder query = RestActions.urlParamsToQueryBuilder(request); explainRequest.query(query); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 5a65c8df116..8d6b003779b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -162,7 +162,7 @@ public class RestSearchAction extends BaseRestHandler { * values that are not overridden by the rest request. 
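Context for the REST changes above: urlParamsToQueryBuilder turns the q URL parameter into a query_string query and returns null when no q was given, which is why every caller null-checks the result. A rough, simplified sketch of that mapping (mine, not the method's actual body; the real method also honors analyzer, lenient and other request parameters):

    import org.elasticsearch.index.query.Operator;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    public class UrlParamsSketch {
        // e.g. GET /index/_count?q=user:kimchy&df=message&default_operator=AND
        public static QueryBuilder fromParams(String q, String defaultField, String defaultOperator) {
            if (q == null) {
                return null; // mirrors the null return the callers above guard against
            }
            return QueryBuilders.queryStringQuery(q)
                    .defaultField(defaultField != null ? defaultField : "_all")
                    .defaultOperator(Operator.fromString(defaultOperator != null ? defaultOperator : "OR"));
        }
    }
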
*/ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuilder, RestRequest request) { - QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); + QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); if (queryBuilder != null) { searchSourceBuilder.query(queryBuilder); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 9ab523ba1e6..652cb8c61e9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -93,7 +93,7 @@ public class RestActions { builder.endObject(); } - public static QueryBuilder urlParamsToQueryBuilder(RestRequest request) { + public static QueryBuilder urlParamsToQueryBuilder(RestRequest request) { String queryString = request.param("q"); if (queryString == null) { return null; @@ -130,7 +130,7 @@ public class RestActions { return content; } - public static QueryBuilder getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, ParseFieldMatcher parseFieldMatcher) { + public static QueryBuilder getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, ParseFieldMatcher parseFieldMatcher) { try (XContentParser requestParser = XContentFactory.xContent(source).createParser(source)) { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, requestParser, parseFieldMatcher); return context.parseTopLevelQueryBuilder(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 3429b1ba18b..e0a6ccd09b4 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -359,7 +359,7 @@ public class SearchModule extends AbstractModule { * is the name by under which the reader is registered. So it is the name that the query should use as its * {@link NamedWriteable#getWriteableName()} too. */ - public > void registerQuery(Writeable.Reader reader, QueryParser queryParser, + public void registerQuery(Writeable.Reader reader, QueryParser queryParser, ParseField queryName) { queryParserRegistry.register(queryParser, queryName); namedWriteableRegistry.register(QueryBuilder.class, queryName.getPreferredName(), reader); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index 1da7c28cf09..23fa8007d5b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -142,7 +142,7 @@ public class AggregationBuilders { /** * Create a new {@link Filter} aggregation with the given name. */ - public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) { + public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) { return new FilterAggregatorBuilder(name, filter); } @@ -156,7 +156,7 @@ public class AggregationBuilders { /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) { + public static FiltersAggregatorBuilder filters(String name, QueryBuilder... 
filters) { return new FiltersAggregatorBuilder(name, filters); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorBuilder.java index 1a0c6decdf9..9024e902d7d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorBuilder.java @@ -41,7 +41,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder filter; + private final QueryBuilder filter; /** * @param name @@ -51,7 +51,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder filter) { + public FilterAggregatorBuilder(String name, QueryBuilder filter) { super(name, InternalFilter.TYPE); if (filter == null) { throw new IllegalArgumentException("[filter] must not be null: [" + name + "]"); @@ -92,7 +92,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder filter = context.parseInnerQueryBuilder(); + QueryBuilder filter = context.parseInnerQueryBuilder(); if (filter == null) { throw new ParsingException(null, "filter cannot be null in filter aggregation [{}]", aggregationName); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index 13b1e62b338..212494ef48b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -38,7 +38,7 @@ public class FilterAggregatorFactory extends AggregatorFactory filterBuilder, AggregationContext context, + public FilterAggregatorFactory(String name, Type type, QueryBuilder filterBuilder, AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, type, context, parent, subFactoriesBuilder, metaData); IndexSearcher contextSearcher = context.searchContext().searcher(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index ca071d7cfc9..a5ce89c4666 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java @@ -59,9 +59,9 @@ public class FiltersAggregator extends BucketsAggregator { public static class KeyedFilter implements Writeable, ToXContent { private final String key; - private final QueryBuilder filter; + private final QueryBuilder filter; - public KeyedFilter(String key, QueryBuilder filter) { + public KeyedFilter(String key, QueryBuilder filter) { if (key == null) { throw new IllegalArgumentException("[key] must not be null"); } @@ -94,7 +94,7 @@ public class FiltersAggregator extends BucketsAggregator { return key; } - public QueryBuilder filter() { + public QueryBuilder filter() { return filter; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregatorBuilder.java index ca9f7f6c7c3..52a1b949b4b 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregatorBuilder.java @@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder... filters) { + public FiltersAggregatorBuilder(String name, QueryBuilder... filters) { super(name, InternalFilters.TYPE); List keyedFilters = new ArrayList<>(filters.length); for (int i = 0; i < filters.length; i++) { @@ -204,7 +204,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder keyedFilters = null; - List> nonKeyedFilters = null; + List nonKeyedFilters = null; XContentParser.Token token = null; String currentFieldName = null; @@ -235,7 +235,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder filter = context.parseInnerQueryBuilder(); + QueryBuilder filter = context.parseInnerQueryBuilder(); keyedFilters.add(new FiltersAggregator.KeyedFilter(key, filter == null ? matchAllQuery() : filter)); } } @@ -247,7 +247,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - QueryBuilder filter = context.parseInnerQueryBuilder(); + QueryBuilder filter = context.parseInnerQueryBuilder(); nonKeyedFilters.add(filter == null ? QueryBuilders.matchAllQuery() : filter); } } else { @@ -270,7 +270,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder[nonKeyedFilters.size()])); + nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()])); } if (otherBucket != null) { factory.otherBucket(otherBucket); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorBuilder.java index 373d757a22c..55b85bef0d9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorBuilder.java @@ -58,7 +58,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui private IncludeExclude includeExclude = null; private String executionHint = null; - private QueryBuilder filterBuilder = null; + private QueryBuilder filterBuilder = null; private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC; @@ -176,7 +176,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return executionHint; } - public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) { + public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) { if (backgroundFilter == null) { throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]"); } @@ -184,7 +184,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return this; } - public QueryBuilder backgroundFilter() { + public QueryBuilder backgroundFilter() { return filterBuilder; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 0f0a37d909a..ce3f83079ea 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -65,12 +65,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac private MappedFieldType fieldType; private FilterableTermsEnum termsEnum; private int numberOfAggregatorsCreated; - private final QueryBuilder filterBuilder; + private final QueryBuilder filterBuilder; private final TermsAggregator.BucketCountThresholds bucketCountThresholds; private final SignificanceHeuristic significanceHeuristic; public SignificantTermsAggregatorFactory(String name, Type type, ValuesSourceConfig config, IncludeExclude includeExclude, - String executionHint, QueryBuilder filterBuilder, TermsAggregator.BucketCountThresholds bucketCountThresholds, + String executionHint, QueryBuilder filterBuilder, TermsAggregator.BucketCountThresholds bucketCountThresholds, SignificanceHeuristic significanceHeuristic, AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, type, config, context, parent, subFactoriesBuilder, metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index b6549c6dc29..60805bea692 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -66,7 +66,7 @@ public class SignificantTermsParser extends AbstractTermsParser { if (incExc != null) { factory.includeExclude(incExc); } - QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER); + QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER); if (backgroundFilter != null) { factory.backgroundFilter(backgroundFilter); } @@ -89,7 +89,7 @@ public class SignificantTermsParser extends AbstractTermsParser { return true; } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregatorBuilder.BACKGROUND_FILTER)) { QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); - QueryBuilder filter = queryParseContext.parseInnerQueryBuilder(); + QueryBuilder filter = queryParseContext.parseInnerQueryBuilder(); otherOptions.put(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER, filter); return true; } diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index b656df4e154..61f4acb81aa 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -120,9 +120,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return new HighlightBuilder(); } - private QueryBuilder queryBuilder; + private QueryBuilder queryBuilder; - private QueryBuilder postQueryBuilder; + private 
QueryBuilder postQueryBuilder; private int from = -1; @@ -371,7 +371,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ * * @see org.elasticsearch.index.query.QueryBuilders */ - public SearchSourceBuilder query(QueryBuilder query) { + public SearchSourceBuilder query(QueryBuilder query) { this.queryBuilder = query; return this; } @@ -379,7 +379,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Gets the query for this request */ - public QueryBuilder query() { + public QueryBuilder query() { return queryBuilder; } @@ -388,7 +388,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ * only has affect on the search hits (not aggregations). This filter is * always executed as last filtering mechanism. */ - public SearchSourceBuilder postFilter(QueryBuilder postFilter) { + public SearchSourceBuilder postFilter(QueryBuilder postFilter) { this.postQueryBuilder = postFilter; return this; } @@ -396,7 +396,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Gets the post filter for this request */ - public QueryBuilder postFilter() { + public QueryBuilder postFilter() { return postQueryBuilder; } @@ -910,11 +910,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ */ public SearchSourceBuilder rewrite(QueryShardContext context) throws IOException { assert (this.equals(shallowCopy(queryBuilder, postQueryBuilder))); - QueryBuilder queryBuilder = null; + QueryBuilder queryBuilder = null; if (this.queryBuilder != null) { queryBuilder = this.queryBuilder.rewrite(context); } - QueryBuilder postQueryBuilder = null; + QueryBuilder postQueryBuilder = null; if (this.postQueryBuilder != null) { postQueryBuilder = this.postQueryBuilder.rewrite(context); } @@ -925,7 +925,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return this; } - private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder postQueryBuilder) { + private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder postQueryBuilder) { SearchSourceBuilder rewrittenBuilder = new SearchSourceBuilder(); rewrittenBuilder.aggregations = aggregations; rewrittenBuilder.explain = explain; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index e19dd33efc8..557567fe354 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -80,7 +80,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery; + protected QueryBuilder highlightQuery; protected Order order; @@ -275,7 +275,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery) { + public HB highlightQuery(QueryBuilder highlightQuery) { this.highlightQuery = highlightQuery; return (HB) this; } @@ -283,7 +283,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery() { + public QueryBuilder highlightQuery() { return this.highlightQuery; } diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java index 2d4778012d3..e91468c805f 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java @@ -41,7 +41,7 @@ public class QueryRescorerBuilder extends RescoreBuilder { public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f; public static final float DEFAULT_QUERYWEIGHT = 1.0f; public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total; - private final QueryBuilder queryBuilder; + private final QueryBuilder queryBuilder; private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT; private float queryWeight = DEFAULT_QUERYWEIGHT; private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE; @@ -70,7 +70,7 @@ public class QueryRescorerBuilder extends RescoreBuilder { * Creates a new {@link QueryRescorerBuilder} instance * @param builder the query builder to build the rescore query from */ - public QueryRescorerBuilder(QueryBuilder builder) { + public QueryRescorerBuilder(QueryBuilder builder) { this.queryBuilder = builder; } @@ -96,7 +96,7 @@ public class QueryRescorerBuilder extends RescoreBuilder { /** * @return the query used for this rescore query */ - public QueryBuilder getRescoreQuery() { + public QueryBuilder getRescoreQuery() { return this.queryBuilder; } @@ -209,12 +209,12 @@ public class QueryRescorerBuilder extends RescoreBuilder { */ private static class InnerBuilder { - private QueryBuilder queryBuilder; + private QueryBuilder queryBuilder; private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT; private float queryWeight = DEFAULT_QUERYWEIGHT; private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE; - void setQueryBuilder(QueryBuilder builder) { + void setQueryBuilder(QueryBuilder builder) { this.queryBuilder = builder; } diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java index e29e7dcd6e9..16c9c9ba8c7 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java @@ -125,7 +125,7 @@ public abstract class RescoreBuilder> extends ToXC public abstract QueryRescoreContext build(QueryShardContext context) throws IOException; - public static QueryRescorerBuilder queryRescorer(QueryBuilder queryBuilder) { + public static QueryRescorerBuilder queryRescorer(QueryBuilder queryBuilder) { return new QueryRescorerBuilder(queryBuilder); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 8d89322e5cd..70876f0acea 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -66,7 +66,7 @@ public class FieldSortBuilder extends SortBuilder { private SortMode sortMode; - private QueryBuilder nestedFilter; + private QueryBuilder nestedFilter; private String nestedPath; @@ -189,7 +189,7 @@ public class FieldSortBuilder extends SortBuilder { * TODO should the above getters and setters be deprecated/ changed in * favour of real getters and setters? */ - public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) { + public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } @@ -198,7 +198,7 @@ public class FieldSortBuilder extends SortBuilder { * Returns the nested filter that the nested objects should match with in * order to be taken into account for sorting. 
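The nested-filter setters above are the main consumer-facing change in the sort builders. A minimal sketch of sorting on a nested field with a filter, using the setNestedPath/setNestedFilter signatures shown in this hunk (my example; the index and field names are made up):

    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.sort.FieldSortBuilder;
    import org.elasticsearch.search.sort.SortOrder;

    public class NestedSortSketch {
        public static void main(String[] args) {
            // Sort by the price of blue offers only; the filter is now a plain QueryBuilder
            // rather than a QueryBuilder<?> wildcard.
            QueryBuilder blueOnly = QueryBuilders.termQuery("offers.color", "blue");
            FieldSortBuilder sort = new FieldSortBuilder("offers.price")
                    .setNestedPath("offers")
                    .setNestedFilter(blueOnly)
                    .order(SortOrder.ASC);
            System.out.println(sort);
        }
    }
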
*/ - public QueryBuilder getNestedFilter() { + public QueryBuilder getNestedFilter() { return this.nestedFilter; } @@ -324,7 +324,7 @@ public class FieldSortBuilder extends SortBuilder { public static FieldSortBuilder fromXContent(QueryParseContext context, String fieldName) throws IOException { XContentParser parser = context.parser(); - QueryBuilder nestedFilter = null; + QueryBuilder nestedFilter = null; String nestedPath = null; Object missing = null; SortOrder order = null; diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 2e08e38a96a..491a37be8b6 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -292,7 +292,7 @@ public class GeoDistanceSortBuilder extends SortBuilder * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. */ - public GeoDistanceSortBuilder setNestedFilter(QueryBuilder nestedFilter) { + public GeoDistanceSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } @@ -301,7 +301,7 @@ public class GeoDistanceSortBuilder extends SortBuilder * Returns the nested filter that the nested objects should match with in order to be taken into account * for sorting. **/ - public QueryBuilder getNestedFilter() { + public QueryBuilder getNestedFilter() { return this.nestedFilter; } @@ -406,7 +406,7 @@ public class GeoDistanceSortBuilder extends SortBuilder GeoDistance geoDistance = GeoDistance.DEFAULT; SortOrder order = SortOrder.ASC; SortMode sortMode = null; - QueryBuilder nestedFilter = null; + QueryBuilder nestedFilter = null; String nestedPath = null; boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 8a713e4992b..c574fbcb7df 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -80,7 +80,7 @@ public class ScriptSortBuilder extends SortBuilder { private SortMode sortMode; - private QueryBuilder nestedFilter; + private QueryBuilder nestedFilter; private String nestedPath; @@ -170,7 +170,7 @@ public class ScriptSortBuilder extends SortBuilder { * Sets the nested filter that the nested objects should match with in order to be taken into account * for sorting. */ - public ScriptSortBuilder setNestedFilter(QueryBuilder nestedFilter) { + public ScriptSortBuilder setNestedFilter(QueryBuilder nestedFilter) { this.nestedFilter = nestedFilter; return this; } @@ -178,7 +178,7 @@ public class ScriptSortBuilder extends SortBuilder { /** * Gets the nested filter. 
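The same pattern applies to script sorts. A sketch assuming the (Script, ScriptSortType) constructor this series appears to introduce; treat the exact signature as unverified:

    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.sort.ScriptSortBuilder;
    import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;

    public class ScriptSortSketch {
        public static void main(String[] args) {
            // Numeric script sort over nested offers; nested path as in the field sort case.
            ScriptSortBuilder sort = new ScriptSortBuilder(
                    new Script("doc['offers.price'].value * 1.19"), ScriptSortType.NUMBER)
                    .setNestedPath("offers");
            System.out.println(sort);
        }
    }
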
*/ - public QueryBuilder getNestedFilter() { + public QueryBuilder getNestedFilter() { return this.nestedFilter; } @@ -236,7 +236,7 @@ public class ScriptSortBuilder extends SortBuilder { ScriptSortType type = null; SortMode sortMode = null; SortOrder order = null; - QueryBuilder nestedFilter = null; + QueryBuilder nestedFilter = null; String nestedPath = null; Map params = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 805d6e90c47..8cd3fb82413 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -169,7 +169,7 @@ public abstract class SortBuilder> extends ToXContentTo return Optional.empty(); } - protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder nestedFilter) throws IOException { + protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder nestedFilter) throws IOException { Nested nested = null; if (nestedPath != null) { BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter()); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index fac11ab1f78..c2a5fe5aacd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -433,7 +433,7 @@ public abstract class AbstractQueryTestCase> return Collections.emptySet(); } - protected static XContentBuilder toXContent(QueryBuilder query, XContentType contentType) throws IOException { + protected static XContentBuilder toXContent(QueryBuilder query, XContentType contentType) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(contentType); if (randomBoolean()) { builder.prettyPrint(); @@ -499,12 +499,12 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as string argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { + protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { assertParsedQuery(queryAsString, expectedQuery, ParseFieldMatcher.STRICT); } - protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { - QueryBuilder newQuery = parseQuery(queryAsString, matcher); + protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + QueryBuilder newQuery = parseQuery(queryAsString, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); @@ -513,38 +513,38 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as bytes argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { + protected final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { 
assertParsedQuery(queryAsBytes, expectedQuery, ParseFieldMatcher.STRICT); } - protected final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { - QueryBuilder newQuery = parseQuery(queryAsBytes, matcher); + protected final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + QueryBuilder newQuery = parseQuery(queryAsBytes, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); } - protected final QueryBuilder parseQuery(String queryAsString) throws IOException { + protected final QueryBuilder parseQuery(String queryAsString) throws IOException { return parseQuery(queryAsString, ParseFieldMatcher.STRICT); } - protected final QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { + protected final QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsString).createParser(queryAsString); return parseQuery(parser, matcher); } - protected final QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { + protected final QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { return parseQuery(queryAsBytes, ParseFieldMatcher.STRICT); } - protected final QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { + protected final QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(queryAsBytes); return parseQuery(parser, matcher); } - private QueryBuilder parseQuery(XContentParser parser, ParseFieldMatcher matcher) throws IOException { + private QueryBuilder parseQuery(XContentParser parser, ParseFieldMatcher matcher) throws IOException { QueryParseContext context = createParseContext(parser, matcher); - QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); + QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); assertNull(parser.nextToken()); return parseInnerQueryBuilder; } @@ -602,8 +602,8 @@ public abstract class AbstractQueryTestCase> } } - private QueryBuilder rewriteQuery(QB queryBuilder, QueryRewriteContext rewriteContext) throws IOException { - QueryBuilder rewritten = QueryBuilder.rewriteQuery(queryBuilder, rewriteContext); + private QueryBuilder rewriteQuery(QB queryBuilder, QueryRewriteContext rewriteContext) throws IOException { + QueryBuilder rewritten = QueryBuilder.rewriteQuery(queryBuilder, rewriteContext); // extra safety to fail fast - serialize the rewritten version to ensure it's serializable. 
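The helper below relies on the rewrite contract: rewrite() returns either a simpler builder or this, and callers iterate until a fixed point is reached. A sketch of that loop (my reading of QueryBuilder.rewriteQuery, not a verbatim copy):

    import java.io.IOException;

    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryRewriteContext;

    public class RewriteLoopSketch {
        static QueryBuilder rewriteToFixedPoint(QueryBuilder original, QueryRewriteContext context) throws IOException {
            QueryBuilder current = original;
            QueryBuilder rewritten;
            // Keep rewriting until a builder returns itself, i.e. nothing further to simplify.
            while ((rewritten = current.rewrite(context)) != current) {
                current = rewritten;
            }
            return current;
        }
    }
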
        assertSerialization(rewritten);
        return rewritten;
@@ -686,7 +686,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             output.writeNamedWriteable(testQuery);
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
-                QueryBuilder<?> deserializedQuery = in.readNamedWriteable(QueryBuilder.class);
+                QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class);
                 assertEquals(testQuery, deserializedQuery);
                 assertEquals(testQuery.hashCode(), deserializedQuery.hashCode());
                 assertNotSame(testQuery, deserializedQuery);
@@ -963,7 +963,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     *
 * - By now the roundtrip check for the json should be happy.
 *
 **/
-    public static void checkGeneratedJson(String expected, QueryBuilder<?> source) throws IOException {
+    public static void checkGeneratedJson(String expected, QueryBuilder source) throws IOException {
         // now assert that we actually generate the same JSON
         XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
         source.toXContent(builder, ToXContent.EMPTY_PARAMS);
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
index b6b97f6fbf2..a49d410fdd6 100644
--- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
@@ -113,9 +113,9 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase
-    private static List<Query> getBooleanClauses(List<QueryBuilder<?>> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException {
+    private static List<Query> getBooleanClauses(List<QueryBuilder> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException {
         List<Query> clauses = new ArrayList<>();
-        for (QueryBuilder<?> query : queryBuilders) {
+        for (QueryBuilder query : queryBuilders) {
             Query innerQuery = query.toQuery(context);
             if (innerQuery != null) {
                 clauses.add(new BooleanClause(innerQuery, occur));
@@ -132,22 +132,22 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase
         if (tempQueryBuilder.must().size() > 0) {
-            QueryBuilder<?> must = tempQueryBuilder.must().get(0);
+            QueryBuilder must = tempQueryBuilder.must().get(0);
             contentString += "\"must\": " + must.toString() + ",";
             expectedQuery.must(must);
         }
         if (tempQueryBuilder.mustNot().size() > 0) {
-            QueryBuilder<?> mustNot = tempQueryBuilder.mustNot().get(0);
+            QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0);
             contentString += (randomBoolean() ? "\"must_not\": " : "\"mustNot\": ") + mustNot.toString() + ",";
             expectedQuery.mustNot(mustNot);
         }
         if (tempQueryBuilder.should().size() > 0) {
-            QueryBuilder<?> should = tempQueryBuilder.should().get(0);
+            QueryBuilder should = tempQueryBuilder.should().get(0);
             contentString += "\"should\": " + should.toString() + ",";
             expectedQuery.should(should);
         }
         if (tempQueryBuilder.filter().size() > 0) {
-            QueryBuilder<?> filter = tempQueryBuilder.filter().get(0);
+            QueryBuilder filter = tempQueryBuilder.filter().get(0);
             contentString += "\"filter\": " + filter.toString() + ",";
             expectedQuery.filter(filter);
         }
@@ -366,7 +366,7 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> rewritten = boolQueryBuilder.rewrite(createShardContext());
+        QueryBuilder rewritten = boolQueryBuilder.rewrite(createShardContext());
         if (mustRewrite == false && boolQueryBuilder.must().isEmpty()) {
             // if it's empty we rewrite to match all
             assertEquals(rewritten, new MatchAllQueryBuilder());
@@ -398,14 +398,14 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> rewritten = boolQueryBuilder.rewrite(createShardContext());
+        QueryBuilder rewritten = boolQueryBuilder.rewrite(createShardContext());
         BoolQueryBuilder expected = new BoolQueryBuilder();
         expected.must(new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()));
         assertEquals(expected, rewritten);
         expected = new BoolQueryBuilder();
         expected.must(new MatchAllQueryBuilder());
-        QueryBuilder<?> rewrittenAgain = rewritten.rewrite(createShardContext());
+        QueryBuilder rewrittenAgain = rewritten.rewrite(createShardContext());
         assertEquals(rewrittenAgain, expected);
         assertEquals(QueryBuilder.rewriteQuery(boolQueryBuilder, createShardContext()), expected);
     }
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java
index 27c888aa9dd..ecdadeca923 100644
--- a/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java
@@ -108,7 +108,7 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> rewrite = qb.rewrite(createShardContext());
+        QueryBuilder rewrite = qb.rewrite(createShardContext());
         if (positive instanceof MatchAllQueryBuilder && negative instanceof MatchAllQueryBuilder) {
             assertSame(rewrite, qb);
         } else {
diff --git a/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java
index 90b692e1db2..8d2b30c9ba8 100644
--- a/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java
@@ -96,7 +96,7 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase
-        expectThrows(IllegalArgumentException.class, () -> new ConstantScoreQueryBuilder((QueryBuilder<?>) null));
+        expectThrows(IllegalArgumentException.class, () -> new ConstantScoreQueryBuilder((QueryBuilder) null));
     }

     @Override
diff --git a/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
index 810cca0b240..b77abd280a7 100644
--- a/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java
@@ -74,7 +74,7 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase
     protected Map<String, DisMaxQueryBuilder> getAlternateVersions() {
         Map<String, DisMaxQueryBuilder> alternateVersions = new HashMap<>();
-        QueryBuilder<?> innerQuery = createTestQueryBuilder().innerQueries().get(0);
+        QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0);
         DisMaxQueryBuilder expectedQuery = new DisMaxQueryBuilder();
         expectedQuery.add(innerQuery);
         String contentString = "{\n" +
@@ -101,7 +101,7 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> innerQueryBuilder = parseQuery(queryString);
+        QueryBuilder innerQueryBuilder = parseQuery(queryString);
         DisMaxQueryBuilder disMaxBuilder = new DisMaxQueryBuilder().add(innerQueryBuilder);
         assertNull(disMaxBuilder.toQuery(createShardContext()));
     }
diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java
index 8ba48839aba..4c6b26d6848 100644
--- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java
@@ -494,7 +494,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> parsedGeoBboxShortcut = parseQuery(json, ParseFieldMatcher.EMPTY);
+        QueryBuilder parsedGeoBboxShortcut = parseQuery(json, ParseFieldMatcher.EMPTY);
         assertThat(parsedGeoBboxShortcut, equalTo(parsed));

         try {
diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
index 08f988c2a0b..aaf804e7703 100644
--- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
@@ -261,7 +261,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> rewrite = sqb.rewrite(createShardContext());
+        QueryBuilder rewrite = sqb.rewrite(createShardContext());
         GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeToReturn);
         geoShapeQueryBuilder.strategy(sqb.strategy());
         geoShapeQueryBuilder.relation(sqb.relation());
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
index 36a135a9fb1..35945f21209 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
@@ -115,7 +115,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> innerQueryBuilder = queryBuilder.query();
+        QueryBuilder innerQueryBuilder = queryBuilder.query();
         if (innerQueryBuilder instanceof EmptyQueryBuilder) {
             assertNull(query);
         } else {
@@ -149,7 +149,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase
-        QueryBuilder<?> query = RandomQueryBuilder.createQuery(random());
+        QueryBuilder query = RandomQueryBuilder.createQuery(random());
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> QueryBuilders.hasChildQuery(null, query, ScoreMode.None));
         assertEquals("[has_child] requires 'type' field", e.getMessage());
diff --git a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java
index 2cccb3f613a..c234016c465 100644
---
a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java @@ -71,7 +71,7 @@ public class IndicesQueryBuilderTests extends AbstractQueryTestCase new IndicesQueryBuilder(new MatchAllQueryBuilder(), new String[0])); IndicesQueryBuilder indicesQueryBuilder = new IndicesQueryBuilder(new MatchAllQueryBuilder(), "index"); - expectThrows(IllegalArgumentException.class, () -> indicesQueryBuilder.noMatchQuery((QueryBuilder) null)); + expectThrows(IllegalArgumentException.class, () -> indicesQueryBuilder.noMatchQuery((QueryBuilder) null)); expectThrows(IllegalArgumentException.class, () -> indicesQueryBuilder.noMatchQuery((String) null)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 4bc95a1c3ce..c7106a7ccf6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -247,7 +247,7 @@ public class InnerHitBuilderTests extends ESTestCase { } if (includeQueryTypeOrPath) { - QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)); + QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)); if (randomBoolean()) { return new InnerHitBuilder(innerHits, randomAsciiOfLength(8), query); } else { diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index b35201f47a1..39c204ec8ae 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -114,7 +114,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase innerQuery = RandomQueryBuilder.createQuery(random()); + QueryBuilder innerQuery = RandomQueryBuilder.createQuery(random()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.nestedQuery(null, innerQuery, ScoreMode.Avg)); assertThat(e.getMessage(), equalTo("[nested] requires 'path' field")); diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java index 59d79378caa..9dc0bdbd133 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java @@ -142,7 +142,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase pqb.toQuery(createShardContext())); assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); - QueryBuilder rewrite = pqb.rewrite(createShardContext()); + QueryBuilder rewrite = pqb.rewrite(createShardContext()); PercolateQueryBuilder geoShapeQueryBuilder = new PercolateQueryBuilder(pqb.getField(), pqb.getDocumentType(), documentSource); assertEquals(geoShapeQueryBuilder, rewrite); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index a763b257afb..4de7dae8c0b 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -413,7 +413,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase queryBuilder = parseQuery(queryAsString); + QueryBuilder queryBuilder = parseQuery(queryAsString); assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class)); QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder; assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris"))); diff --git a/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java index f99a202eb88..23ef8045cc1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java +++ b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java @@ -36,7 +36,7 @@ public class RandomQueryBuilder { * @param r random seed * @return a random {@link QueryBuilder} */ - public static QueryBuilder createQuery(Random r) { + public static QueryBuilder createQuery(Random r) { switch (RandomInts.randomIntBetween(r, 0, 4)) { case 0: return new MatchAllQueryBuilderTests().createTestQueryBuilder(); @@ -61,7 +61,7 @@ public class RandomQueryBuilder { public static MultiTermQueryBuilder createMultiTermQuery(Random r) { // for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense // see issue #12123 for discussion - MultiTermQueryBuilder multiTermQueryBuilder; + MultiTermQueryBuilder multiTermQueryBuilder; switch(RandomInts.randomIntBetween(r, 0, 3)) { case 0: RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME); diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index d242e2ecf8f..c3db90f1f6d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -464,7 +464,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase rewritten = query.rewrite(queryShardContext); + QueryBuilder rewritten = query.rewrite(queryShardContext); assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten; assertThat(rewrittenRange.fieldName(), equalTo(fieldName)); @@ -485,7 +485,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase rewritten = query.rewrite(queryShardContext); + QueryBuilder rewritten = query.rewrite(queryShardContext); assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); } @@ -502,7 +502,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase rewritten = query.rewrite(queryShardContext); + QueryBuilder rewritten = query.rewrite(queryShardContext); assertThat(rewritten, sameInstance(query)); } @@ -515,7 +515,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase rewritten = query.rewrite(queryShardContext); + QueryBuilder rewritten = query.rewrite(queryShardContext); assertThat(rewritten, sameInstance(query)); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index cb35b85d3d9..e9ac69001ef 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java
@@ -59,7 +59,7 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase
-        expectThrows(IllegalArgumentException.class, () -> new SpanMultiTermQueryBuilder((MultiTermQueryBuilder<?>) null));
+        expectThrows(IllegalArgumentException.class, () -> new SpanMultiTermQueryBuilder((MultiTermQueryBuilder) null));
     }

     /**
diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java
index 11ebd431030..202623d1980 100644
--- a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java
@@ -50,7 +50,7 @@ public class SpanNearQueryBuilderTests extends AbstractQueryTestCase
-        Iterator<SpanQueryBuilder<?>> spanQueryBuilderIterator = queryBuilder.clauses().iterator();
+        Iterator<SpanQueryBuilder> spanQueryBuilderIterator = queryBuilder.clauses().iterator();
         for (SpanQuery spanQuery : spanNearQuery.getClauses()) {
             assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context)));
         }
diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java
index b7533fe4605..a9c4ec7d7dc 100644
--- a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java
@@ -45,14 +45,14 @@ public class SpanOrQueryBuilderTests extends AbstractQueryTestCase
-        Iterator<SpanQueryBuilder<?>> spanQueryBuilderIterator = queryBuilder.clauses().iterator();
+        Iterator<SpanQueryBuilder> spanQueryBuilderIterator = queryBuilder.clauses().iterator();
         for (SpanQuery spanQuery : spanOrQuery.getClauses()) {
             assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context)));
         }
     }

     public void testIllegalArguments() {
-        expectThrows(IllegalArgumentException.class, () -> new SpanOrQueryBuilder((SpanQueryBuilder<?>) null));
+        expectThrows(IllegalArgumentException.class, () -> new SpanOrQueryBuilder((SpanQueryBuilder) null));

         try {
             SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(new SpanTermQueryBuilder("field", "value"));
diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java
index a6dc42a266e..ee6621bef5b 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java
@@ -40,7 +40,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase
-    private static QueryBuilder<?> templateBase;
+    private static QueryBuilder templateBase;

     @BeforeClass
     public static void setupClass() {
@@ -102,7 +102,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase
         Map<String, Object> params = new HashMap<>();
         params.put("template", "all");
-        QueryBuilder<?> expectedBuilder = new TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null, null,
+        QueryBuilder expectedBuilder = new TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null, null,
                 params));
         assertParsedQuery(query, expectedBuilder);
     }
@@ -112,7 +112,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase
         Map<String, Object> params = new HashMap<>();
         params.put("template", "all");
-        QueryBuilder<?> expectedBuilder = new TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null,
+        QueryBuilder expectedBuilder = new
TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null, XContentType.JSON, params)); assertParsedQuery(query, expectedBuilder); } @@ -120,7 +120,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase builder = new TemplateQueryBuilder(new Template(query, ScriptType.INLINE, "mockscript", + QueryBuilder builder = new TemplateQueryBuilder(new Template(query, ScriptType.INLINE, "mockscript", XContentType.JSON, Collections.emptyMap())); try { builder.toQuery(createShardContext()); @@ -133,7 +133,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase builder = new TemplateQueryBuilder(new Template(query, ScriptType.INLINE, "mockscript", + QueryBuilder builder = new TemplateQueryBuilder(new Template(query, ScriptType.INLINE, "mockscript", XContentType.JSON, Collections.emptyMap())); assertEquals(new MatchAllQueryBuilder().queryName("foobar"), builder.rewrite(createShardContext())); @@ -145,7 +145,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase builder = new TemplateQueryBuilder(new Template(query.toString(), ScriptType.INLINE, "mockscript", + QueryBuilder builder = new TemplateQueryBuilder(new Template(query.toString(), ScriptType.INLINE, "mockscript", XContentType.JSON, Collections.emptyMap())); assertEquals(query, builder.rewrite(createShardContext())); diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index a16f4e2f2ea..0b4faebe18d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -291,7 +291,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase inShortcutParsed = parseQuery(json, ParseFieldMatcher.EMPTY); + QueryBuilder inShortcutParsed = parseQuery(json, ParseFieldMatcher.EMPTY); assertThat(inShortcutParsed, equalTo(parsed)); try { diff --git a/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java index 749cef61c61..e2a0d551919 100644 --- a/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java @@ -54,7 +54,7 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase innerQuery = queryBuilder.rewrite(createShardContext()); + QueryBuilder innerQuery = queryBuilder.rewrite(createShardContext()); Query expected = rewrite(innerQuery.toQuery(context)); assertEquals(rewrite(query), expected); } @@ -138,12 +138,12 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase rewrite = qb.rewrite(createShardContext()); + QueryBuilder rewrite = qb.rewrite(createShardContext()); assertEquals(tqb, rewrite); } public void testRewriteWithInnerName() throws IOException { - QueryBuilder builder = new WrapperQueryBuilder("{ \"match_all\" : {\"_name\" : \"foobar\"}}"); + QueryBuilder builder = new WrapperQueryBuilder("{ \"match_all\" : {\"_name\" : \"foobar\"}}"); QueryShardContext shardContext = createShardContext(); assertEquals(new MatchAllQueryBuilder().queryName("foobar"), builder.rewrite(shardContext)); builder = new WrapperQueryBuilder("{ \"match_all\" : {\"_name\" : \"foobar\"}}").queryName("outer"); @@ -153,7 +153,7 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase builder = new 
WrapperQueryBuilder(query.toString()); + QueryBuilder builder = new WrapperQueryBuilder(query.toString()); QueryShardContext shardContext = createShardContext(); assertEquals(query, builder.rewrite(shardContext)); builder = new WrapperQueryBuilder(query.toString()).boost(3); diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 540066ba58b..0cbd80dc76c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -240,7 +240,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase new FunctionScoreQueryBuilder((QueryBuilder) null)); + expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder((QueryBuilder) null)); expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder((ScoreFunctionBuilder) null)); expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder((FilterFunctionBuilder[]) null)); expectThrows(IllegalArgumentException.class, () -> new FunctionScoreQueryBuilder(null, randomFunction(123))); @@ -301,7 +301,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase queryBuilder = parseQuery(functionScoreQuery); + QueryBuilder queryBuilder = parseQuery(functionScoreQuery); /* * given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the * initial format was @@ -343,7 +343,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase) queryBuilder).buildAsBytes(XContentType.values()[i])); + queryBuilder = parseQuery(((AbstractQueryBuilder) queryBuilder).buildAsBytes(XContentType.values()[i])); } } } @@ -369,7 +369,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase queryBuilder = parseQuery(functionScoreQuery); + QueryBuilder queryBuilder = parseQuery(functionScoreQuery); /* * given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the * initial format was @@ -395,7 +395,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase) queryBuilder).buildAsBytes(XContentType.values()[i])); + queryBuilder = parseQuery(((AbstractQueryBuilder) queryBuilder).buildAsBytes(XContentType.values()[i])); } } } @@ -476,7 +476,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase query = parseQuery(queryString); + QueryBuilder query = parseQuery(queryString); assertThat(query, instanceOf(FunctionScoreQueryBuilder.class)); FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) query; assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(), @@ -618,9 +618,9 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase firstFunction = new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString()); + QueryBuilder firstFunction = new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString()); TermQueryBuilder secondFunction = new TermQueryBuilder("tq", "2"); - QueryBuilder queryBuilder = randomBoolean() ? new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()) + QueryBuilder queryBuilder = randomBoolean() ? 
new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()) : new TermQueryBuilder("foo", "bar"); FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(queryBuilder, new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index b309ed25217..9859936b410 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -109,7 +109,7 @@ public class FilterIT extends ESIntegTestCase { // See NullPointer issue when filters are empty: // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { - QueryBuilder emptyFilter = new BoolQueryBuilder(); + QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).execute().actionGet(); assertSearchResponse(response); @@ -120,7 +120,7 @@ public class FilterIT extends ESIntegTestCase { } public void testEmptyFilter() throws Exception { - QueryBuilder emptyFilter = new EmptyQueryBuilder(); + QueryBuilder emptyFilter = new EmptyQueryBuilder(); SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 2f076a34eac..fd4db82f2db 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -138,7 +138,7 @@ public class FiltersIT extends ESIntegTestCase { // See NullPointer issue when filters are empty: // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { - QueryBuilder emptyFilter = new BoolQueryBuilder(); + QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx") .addAggregation(filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1"))))) @@ -207,7 +207,7 @@ public class FiltersIT extends ESIntegTestCase { } public void testEmptyFilter() throws Exception { - QueryBuilder emptyFilter = new EmptyQueryBuilder(); + QueryBuilder emptyFilter = new EmptyQueryBuilder(); SearchResponse response = client().prepareSearch("idx").addAggregation(filters("tag1", emptyFilter)).execute().actionGet(); assertSearchResponse(response); @@ -219,7 +219,7 @@ public class FiltersIT extends ESIntegTestCase { } public void testEmptyKeyedFilter() throws Exception { - QueryBuilder emptyFilter = new EmptyQueryBuilder(); + QueryBuilder emptyFilter = new EmptyQueryBuilder(); SearchResponse response = client().prepareSearch("idx").addAggregation(filters("tag1", new KeyedFilter("foo", emptyFilter))) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java index 43b76f74ca3..cd2dae53327 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java @@ -42,7 +42,7 @@ public class FiltersTests extends BaseAggregationTestCase[] filters = new QueryBuilder[size]; + QueryBuilder[] filters = new QueryBuilder[size]; for (int i = 0; i < size; i++) { filters[i] = QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20)); } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 23860883186..83e62072683 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -78,7 +78,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); // uses index.version.created } - private final QueryBuilder baseQuery = constantScoreQuery(termQuery("test", "value")); + private final QueryBuilder baseQuery = constantScoreQuery(termQuery("test", "value")); public void testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked(prepareCreate("test").addMapping( diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index f6ef6175530..14d0fc959c3 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -583,7 +583,7 @@ public class QueryRescorerIT extends ESIntegTestCase { String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) }; - QueryBuilder query = boolQuery().disableCoord(true) + QueryBuilder query = boolQuery().disableCoord(true) .should(functionScoreQuery(termQuery("field1", intToEnglish[0]), weightFactorFunction(2.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[1]), weightFactorFunction(3.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[2]), weightFactorFunction(5.0f)).boostMode(REPLACE)) diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index fde0b08974f..fb179dc7c16 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -314,7 +314,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { * create random shape that is put under test */ public static QueryRescorerBuilder randomRescoreBuilder() { - QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()) + QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()) .queryName(randomAsciiOfLength(20)); org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 76d5eba12f9..2978c5dae7a 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ 
b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -256,7 +256,7 @@ public abstract class AbstractSortTestCase> extends EST return doubleFieldType; } - protected static QueryBuilder randomNestedFilter() { + protected static QueryBuilder randomNestedFilter() { int id = randomIntBetween(0, 2); switch(id) { case 0: return (new MatchAllQueryBuilder()).boost(randomFloat()); diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc index d2bdf5528ac..004d803dc40 100644 --- a/docs/reference/migration/migrate_5_0/java.asciidoc +++ b/docs/reference/migration/migrate_5_0/java.asciidoc @@ -279,20 +279,20 @@ requests can now be validated at call time which results in much clearer errors. ==== ValidateQueryRequest `source(QuerySourceBuilder)`, `source(Map)`, `source(XContentBuilder)`, `source(String)`, `source(byte[])`, `source(byte[], int, int)`, -`source(BytesReference)` and `source()` have been removed in favor of using `query(QueryBuilder)` and `query()` +`source(BytesReference)` and `source()` have been removed in favor of using `query(QueryBuilder)` and `query()` ==== ValidateQueryRequestBuilder -`setSource()` methods have been removed in favor of using `setQuery(QueryBuilder)` +`setSource()` methods have been removed in favor of using `setQuery(QueryBuilder)` ==== ExplainRequest `source(QuerySourceBuilder)`, `source(Map)`, `source(BytesReference)` and `source()` have been removed in favor of using -`query(QueryBuilder)` and `query()` +`query(QueryBuilder)` and `query()` ==== ExplainRequestBuilder -The `setQuery(BytesReference)` method have been removed in favor of using `setQuery(QueryBuilder)` +The `setQuery(BytesReference)` method have been removed in favor of using `setQuery(QueryBuilder)` === ClusterStatsResponse diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index e00ac3320d1..1c17c1966e5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -68,7 +68,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { return Collections.singleton(GroovyPlugin.class); } - private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index e93a7ba7e42..c158a7669d2 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -62,7 +62,7 @@ public abstract class AbstractBulkByScrollRequestBuilder< * Set the query that will filter the source. Just a convenience method for * easy chaining. 
*/ - public Self filter(QueryBuilder filter) { + public Self filter(QueryBuilder filter) { source.setQuery(filter); return self(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java index c2f0b5625d8..45bacbcf13a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexParentChildTests.java @@ -32,9 +32,9 @@ import static org.hamcrest.Matchers.equalTo; * Index-by-search tests for parent/child. */ public class ReindexParentChildTests extends ReindexTestCase { - QueryBuilder findsCountry; - QueryBuilder findsCity; - QueryBuilder findsNeighborhood; + QueryBuilder findsCountry; + QueryBuilder findsCity; + QueryBuilder findsNeighborhood; public void testParentChild() throws Exception { createParentChildIndex("source"); diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java index 145ef690dc3..682fec46c3b 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java @@ -69,7 +69,7 @@ public class DeleteByQueryRequest extends ActionRequest im private String[] types = Strings.EMPTY_ARRAY; - private QueryBuilder query; + private QueryBuilder query; private String routing; @@ -132,11 +132,11 @@ public class DeleteByQueryRequest extends ActionRequest im return this; } - public QueryBuilder query() { + public QueryBuilder query() { return query; } - public DeleteByQueryRequest query(QueryBuilder queryBuilder) { + public DeleteByQueryRequest query(QueryBuilder queryBuilder) { this.query = queryBuilder; return this; } diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java index 7560e1e8b11..dc5ba3a15fb 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java @@ -55,7 +55,7 @@ public class DeleteByQueryRequestBuilder extends ActionRequestBuilder queryBuilder) { + public DeleteByQueryRequestBuilder setQuery(QueryBuilder queryBuilder) { request.query(queryBuilder); return this; } diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java index a7146c2a768..2b537d1cf8a 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java @@ -66,7 +66,7 @@ public class RestDeleteByQueryAction extends BaseRestHandler { if (RestActions.hasBodyContent(request)) { delete.query(RestActions.getQueryContent(RestActions.getRestContent(request), indicesQueriesRegistry, parseFieldMatcher)); } else { - QueryBuilder queryBuilder = 
RestActions.urlParamsToQueryBuilder(request); + QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); if (queryBuilder != null) { delete.query(queryBuilder); } From c55df195c5a11b92bad9316ef8e6bfdc5a39cbe3 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 6 May 2016 09:25:58 +0200 Subject: [PATCH 0096/1311] Fixed bad asciidoc --- docs/reference/mapping/fields/parent-field.asciidoc | 2 +- docs/reference/mapping/fields/routing-field.asciidoc | 2 +- docs/reference/mapping/fields/type-field.asciidoc | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/mapping/fields/parent-field.asciidoc b/docs/reference/mapping/fields/parent-field.asciidoc index c15a7bcd3b9..78ce1ebfd00 100644 --- a/docs/reference/mapping/fields/parent-field.asciidoc +++ b/docs/reference/mapping/fields/parent-field.asciidoc @@ -81,7 +81,7 @@ GET my_index/_search }, "script_fields": { "parent": { - "script": "doc['_parent']" <4> + "script": "doc['_parent']" <3> } } } diff --git a/docs/reference/mapping/fields/routing-field.asciidoc b/docs/reference/mapping/fields/routing-field.asciidoc index 496d8dcf56e..c90a52aab37 100644 --- a/docs/reference/mapping/fields/routing-field.asciidoc +++ b/docs/reference/mapping/fields/routing-field.asciidoc @@ -42,7 +42,7 @@ GET my_index/_search }, "script_fields": { "Routing value": { - "script": "doc['_routing']" <4> + "script": "doc['_routing']" <2> } } } diff --git a/docs/reference/mapping/fields/type-field.asciidoc b/docs/reference/mapping/fields/type-field.asciidoc index 8a569d22f53..a1d48129f5d 100644 --- a/docs/reference/mapping/fields/type-field.asciidoc +++ b/docs/reference/mapping/fields/type-field.asciidoc @@ -1,4 +1,4 @@ -\[[mapping-type-field]] +[[mapping-type-field]] === `_type` field Each document indexed is associated with a <> (see From de8354dd7fa93dbcc71f5dbf154079f92b2b0bf0 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 25 Apr 2016 17:45:33 +0200 Subject: [PATCH 0097/1311] Allow binary sort values. #17959 The `ip` field uses a binary representation internally. This breaks when rendering sort values in search responses since elasticsearch tries to write a binary byte[] as an utf8 json string. This commit extends the `DocValueFormat` API in order to give fields a chance to choose how to render values. 
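To make the failure mode concrete, here is a minimal, self-contained Java sketch using only the JDK. The `ValueFormat` interface below is a hypothetical, simplified stand-in for the `DocValueFormat` hook this commit describes, not the actual Elasticsearch API:

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;

public class BinarySortValueSketch {

    /** Simplified stand-in for a per-field rendering hook. */
    interface ValueFormat {
        String format(byte[] value);
    }

    // Naive rendering: reinterpret the raw doc-value bytes as UTF-8.
    // For binary representations such as an encoded IP this yields
    // replacement characters rather than anything readable.
    static final ValueFormat RAW_UTF8 = value -> new String(value, StandardCharsets.UTF_8);

    // Field-aware rendering: decode the bytes back into an address first.
    static final ValueFormat IP = value -> {
        try {
            return InetAddress.getByAddress(value).getHostAddress();
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("not an encoded IP address", e);
        }
    };

    public static void main(String[] args) throws Exception {
        // 16 bytes, the binary form an ip field might store as a doc value
        byte[] sortValue = InetAddress.getByName("2001:db8::1").getAddress();
        System.out.println(RAW_UTF8.format(sortValue)); // mojibake, useless in a JSON response
        System.out.println(IP.format(sortValue));       // 2001:db8:0:0:0:0:0:1
    }
}

With a per-field hook of this shape, the response rendering layer can ask the field how to print its raw sort value instead of assuming every byte[] is UTF-8 text.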
Closes #6077 --- .../index/query/InnerHitBuilder.java | 4 +- .../elasticsearch/search/DocValueFormat.java | 58 +++++++++++++-- .../elasticsearch/search/SearchService.java | 3 +- .../metrics/tophits/TopHitsAggregator.java | 16 ++--- .../tophits/TopHitsAggregatorFactory.java | 4 +- .../controller/SearchPhaseController.java | 2 +- .../fetch/innerhits/InnerHitsContext.java | 4 +- .../innerhits/InnerHitsFetchSubPhase.java | 4 +- .../search/internal/DefaultSearchContext.java | 8 +-- .../internal/FilteredSearchContext.java | 6 +- .../search/internal/InternalSearchHit.java | 26 ++----- .../search/internal/SearchContext.java | 5 +- .../search/internal/SubSearchContext.java | 9 ++- .../search/query/QueryPhase.java | 19 +++-- .../search/query/QuerySearchResult.java | 34 ++++++++- .../search/rescore/RescorePhase.java | 2 +- .../searchafter/SearchAfterBuilder.java | 26 +++---- .../search/sort/FieldSortBuilder.java | 12 ++-- .../search/sort/GeoDistanceSortBuilder.java | 5 +- .../search/sort/ScoreSortBuilder.java | 9 ++- .../search/sort/ScriptSortBuilder.java | 5 +- .../search/sort/SortAndFormats.java | 38 ++++++++++ .../search/sort/SortBuilder.java | 16 +++-- .../search/sort/SortFieldAndFormat.java | 36 ++++++++++ .../common/network/NetworkAddressTests.java | 1 + .../query/HasChildQueryBuilderTests.java | 4 +- .../query/HasParentQueryBuilderTests.java | 4 +- .../index/query/NestedQueryBuilderTests.java | 4 +- .../search/DocValueFormatTests.java | 64 +++++++++++++++++ .../search/searchafter/SearchAfterIT.java | 7 +- .../search/sort/AbstractSortTestCase.java | 7 +- .../search/sort/FieldSortBuilderTests.java | 4 +- .../search/sort/FieldSortIT.java | 72 ++++++++++++------- .../search/sort/GeoDistanceSortBuilderIT.java | 2 +- .../sort/GeoDistanceSortBuilderTests.java | 3 +- .../search/sort/ScoreSortBuilderTests.java | 3 +- .../search/sort/ScriptSortBuilderTests.java | 3 +- .../migration/migrate_5_0/java.asciidoc | 4 ++ .../elasticsearch/test/TestSearchContext.java | 12 +--- 39 files changed, 402 insertions(+), 143 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/sort/SortAndFormats.java create mode 100644 core/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index c07c75532f1..b62b5a18a1d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.search.Sort; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -41,6 +40,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; @@ -512,7 +512,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl innerHitsContext.fetchSourceContext(fetchSourceContext); } if (sorts != null) { - Optional optionalSort = SortBuilder.buildSort(sorts, context); + Optional optionalSort = SortBuilder.buildSort(sorts, context); if 
(optionalSort.isPresent()) {
             innerHitsContext.sort(optionalSort.get());
         }
diff --git a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java
index 78f8460f1cc..c7c993b82b6 100644
--- a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java
+++ b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.search;

 import org.apache.lucene.document.InetAddressPoint;
-import org.apache.lucene.index.Term;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.io.stream.NamedWriteable;
@@ -29,8 +28,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.joda.DateMathParser;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.network.NetworkAddress;
-import org.elasticsearch.index.mapper.ip.IpFieldMapper;
 import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper;
 import org.joda.time.DateTimeZone;

@@ -48,16 +47,33 @@ import java.util.concurrent.Callable;
 /** A formatter for values as returned by the fielddata/doc-values APIs. */
 public interface DocValueFormat extends NamedWriteable {

+    /** Format a long value. This is used by terms and histogram aggregations
+     * to format keys for fields that use longs as a doc value representation
+     * such as the {@code long} and {@code date} fields. */
     String format(long value);

+    /** Format a double value. This is used by terms and stats aggregations
+     * to format keys for fields that use numbers as a doc value representation
+     * such as the {@code long}, {@code double} or {@code date} fields. */
     String format(double value);

+    /** Format a binary value. This is used by terms aggregations to format
+     * keys for fields that use binary doc value representations such as the
+     * {@code keyword} and {@code ip} fields. */
     String format(BytesRef value);

+    /** Parse a value that was formatted with {@link #format(long)} back to the
+     * original long value. */
     long parseLong(String value, boolean roundUp, Callable<Long> now);

+    /** Parse a value that was formatted with {@link #format(double)} back to
+     * the original double value. */
     double parseDouble(String value, boolean roundUp, Callable<Long> now);

+    /** Parse a value that was formatted with {@link #format(BytesRef)} back
+     * to the original BytesRef. */
+    BytesRef parseBytesRef(String value);
+
     public static final DocValueFormat RAW = new DocValueFormat() {

         @Override
@@ -81,7 +97,7 @@ public interface DocValueFormat extends NamedWriteable {

         @Override
         public String format(BytesRef value) {
-            return Term.toString(value);
+            return value.utf8ToString();
         }

         @Override
@@ -99,6 +115,10 @@ public interface DocValueFormat extends NamedWriteable {
         public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
             return Double.parseDouble(value);
         }
+
+        public BytesRef parseBytesRef(String value) {
+            return new BytesRef(value);
+        }
     };

     public static final class DateTime implements DocValueFormat {
@@ -154,6 +174,11 @@ public interface DocValueFormat extends NamedWriteable {
         public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
             return parseLong(value, roundUp, now);
         }
+
+        @Override
+        public BytesRef parseBytesRef(String value) {
+            throw new UnsupportedOperationException();
+        }
     }

     public static final DocValueFormat GEOHASH = new DocValueFormat() {
@@ -191,6 +216,11 @@ public interface DocValueFormat extends NamedWriteable {
         public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
             throw new UnsupportedOperationException();
         }
+
+        @Override
+        public BytesRef parseBytesRef(String value) {
+            throw new UnsupportedOperationException();
+        }
     };

     public static final DocValueFormat BOOLEAN = new DocValueFormat() {
@@ -221,13 +251,24 @@ public interface DocValueFormat extends NamedWriteable {

         @Override
         public long parseLong(String value, boolean roundUp, Callable<Long> now) {
-            throw new UnsupportedOperationException();
+            switch (value) {
+            case "false":
+                return 0;
+            case "true":
+                return 1;
+            }
+            throw new IllegalArgumentException("Cannot parse boolean [" + value + "], expected either [true] or [false]");
         }

         @Override
         public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
             throw new UnsupportedOperationException();
         }
+
+        @Override
+        public BytesRef parseBytesRef(String value) {
+            throw new UnsupportedOperationException();
+        }
     };

     public static final DocValueFormat IP = new DocValueFormat() {
@@ -268,6 +309,11 @@ public interface DocValueFormat extends NamedWriteable {
         public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
             return parseLong(value, roundUp, now);
         }
+
+        @Override
+        public BytesRef parseBytesRef(String value) {
+            return new BytesRef(InetAddressPoint.encode(InetAddresses.forString(value)));
+        }
     };

     public static final class Decimal implements DocValueFormat {
@@ -344,5 +390,9 @@
             return n.doubleValue();
         }

+        @Override
+        public BytesRef parseBytesRef(String value) {
+            throw new UnsupportedOperationException();
+        }
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java
index 24746431949..636e4338063 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchService.java
@@ -104,6 +104,7 @@ import org.elasticsearch.search.query.QuerySearchResultProvider;
 import org.elasticsearch.search.query.ScrollQuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreBuilder;
 import org.elasticsearch.search.searchafter.SearchAfterBuilder;
+import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.suggest.Suggesters;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -698,7 +699,7 @@ public class
SearchService extends AbstractLifecycleComponent imp } if (source.sorts() != null) { try { - Optional optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext()); + Optional optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext()); if (optionalSort.isPresent()) { context.sort(optionalSort.get()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java index 8e50dc60ea8..07292f1d29f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; @@ -45,6 +44,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; import java.util.List; @@ -78,9 +78,9 @@ public class TopHitsAggregator extends MetricsAggregator { @Override public boolean needsScores() { - Sort sort = subSearchContext.sort(); + SortAndFormats sort = subSearchContext.sort(); if (sort != null) { - return sort.needsScores() || subSearchContext.trackScores(); + return sort.sort.needsScores() || subSearchContext.trackScores(); } else { // sort by score return true; @@ -112,12 +112,12 @@ public class TopHitsAggregator extends MetricsAggregator { public void collect(int docId, long bucket) throws IOException { TopDocsAndLeafCollector collectors = topDocsCollectors.get(bucket); if (collectors == null) { - Sort sort = subSearchContext.sort(); + SortAndFormats sort = subSearchContext.sort(); int topN = subSearchContext.from() + subSearchContext.size(); // In the QueryPhase we don't need this protection, because it is build into the IndexSearcher, // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); - TopDocsCollector topLevelCollector = sort != null ? TopFieldCollector.create(sort, topN, true, subSearchContext.trackScores(), subSearchContext.trackScores()) : TopScoreDocCollector.create(topN); + TopDocsCollector topLevelCollector = sort != null ? TopFieldCollector.create(sort.sort, topN, true, subSearchContext.trackScores(), subSearchContext.trackScores()) : TopScoreDocCollector.create(topN); collectors = new TopDocsAndLeafCollector(topLevelCollector); collectors.leafCollector = collectors.topLevelCollector.getLeafCollector(ctx); collectors.leafCollector.setScorer(scorer); @@ -137,7 +137,7 @@ public class TopHitsAggregator extends MetricsAggregator { } else { final TopDocs topDocs = topDocsCollector.topLevelCollector.topDocs(); - subSearchContext.queryResult().topDocs(topDocs); + subSearchContext.queryResult().topDocs(topDocs, subSearchContext.sort() == null ? 
null : subSearchContext.sort().formats); int[] docIdsToLoad = new int[topDocs.scoreDocs.length]; for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[i] = topDocs.scoreDocs[i].doc; @@ -153,7 +153,7 @@ public class TopHitsAggregator extends MetricsAggregator { searchHitFields.score(scoreDoc.score); if (scoreDoc instanceof FieldDoc) { FieldDoc fieldDoc = (FieldDoc) scoreDoc; - searchHitFields.sortValues(fieldDoc.fields); + searchHitFields.sortValues(fieldDoc.fields, subSearchContext.sort().formats); } } topHits = new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocs, fetchResult.hits(), pipelineAggregators(), @@ -166,7 +166,7 @@ public class TopHitsAggregator extends MetricsAggregator { public InternalTopHits buildEmptyAggregation() { TopDocs topDocs; if (subSearchContext.sort() != null) { - topDocs = new TopFieldDocs(0, new FieldDoc[0], subSearchContext.sort().getSort(), Float.NaN); + topDocs = new TopFieldDocs(0, new FieldDoc[0], subSearchContext.sort().sort.getSort(), Float.NaN); } else { topDocs = Lucene.EMPTY_TOP_DOCS; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java index 478f03c7eac..ac001222301 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.metrics.tophits; -import org.apache.lucene.search.Sort; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; @@ -35,6 +34,7 @@ import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; @@ -87,7 +87,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory optionalSort = SortBuilder.buildSort(sorts, subSearchContext.getQueryShardContext()); + Optional optionalSort = SortBuilder.buildSort(sorts, subSearchContext.getQueryShardContext()); if (optionalSort.isPresent()) { subSearchContext.sort(optionalSort.get()); } diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index 1c48be1b959..a8ecc7a508c 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -362,7 +362,7 @@ public class SearchPhaseController extends AbstractComponent { if (sorted) { FieldDoc fieldDoc = (FieldDoc) shardDoc; - searchHit.sortValues(fieldDoc.fields); + searchHit.sortValues(fieldDoc.fields, firstResult.sortValueFormats()); if (sortScoreIndex != -1) { searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue()); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index 56059fc0d01..31921457207 100644 --- 
a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -142,7 +142,7 @@ public final class InnerHitsContext { TopDocsCollector topDocsCollector; if (sort() != null) { try { - topDocsCollector = TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores()); + topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores()); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } @@ -317,7 +317,7 @@ public final class InnerHitsContext { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); TopDocsCollector topDocsCollector; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores()); + topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores()); } else { topDocsCollector = TopScoreDocCollector.create(topN); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java index ea7edcc3dd4..82c3755cdc9 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java @@ -73,7 +73,7 @@ public class InnerHitsFetchSubPhase implements FetchSubPhase { } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } - innerHits.queryResult().topDocs(topDocs); + innerHits.queryResult().topDocs(topDocs, innerHits.sort() == null ? null : innerHits.sort().formats); int[] docIdsToLoad = new int[topDocs.scoreDocs.length]; for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[i] = topDocs.scoreDocs[i].doc; @@ -89,7 +89,7 @@ public class InnerHitsFetchSubPhase implements FetchSubPhase { searchHitFields.score(scoreDoc.score); if (scoreDoc instanceof FieldDoc) { FieldDoc fieldDoc = (FieldDoc) scoreDoc; - searchHitFields.sortValues(fieldDoc.fields); + searchHitFields.sortValues(fieldDoc.fields, innerHits.sort().formats); } } results.put(entry.getKey(), fetchResult.hits()); diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 435e809a893..96319303420 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Counter; @@ -71,6 +70,7 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QueryPhaseExecutionException; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.io.IOException; @@ -114,7 +114,7 @@ public class DefaultSearchContext extends SearchContext { private FetchSourceContext fetchSourceContext; private int from = 
-1; private int size = -1; - private Sort sort; + private SortAndFormats sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... private FieldDoc searchAfter; @@ -532,13 +532,13 @@ public class DefaultSearchContext extends SearchContext { } @Override - public SearchContext sort(Sort sort) { + public SearchContext sort(SortAndFormats sort) { this.sort = sort; return this; } @Override - public Sort sort() { + public SortAndFormats sort() { return this.sort; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 283e1dd60e8..8009d0b5fe4 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -55,6 +54,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.util.List; @@ -306,12 +306,12 @@ public abstract class FilteredSearchContext extends SearchContext { } @Override - public SearchContext sort(Sort sort) { + public SearchContext sort(SortAndFormats sort) { return in.sort(sort); } @Override - public Sort sort() { + public SortAndFormats sort() { return in.sort(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index 628f409974e..f67ed51b226 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.StreamInput; @@ -34,6 +33,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; @@ -44,6 +44,7 @@ import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -326,21 +327,13 @@ public class InternalSearchHit implements SearchHit { this.highlightFields = highlightFields; } - public void sortValues(Object[] sortValues) { 
- // LUCENE 4 UPGRADE: There must be a better way - // we want to convert to a Text object here, and not BytesRef - - // Don't write into sortValues! Otherwise the fields in FieldDoc is modified, which may be used in other places. (SearchContext#lastEmitedDoc) - Object[] sortValuesCopy = new Object[sortValues.length]; - System.arraycopy(sortValues, 0, sortValuesCopy, 0, sortValues.length); - if (sortValues != null) { - for (int i = 0; i < sortValues.length; i++) { - if (sortValues[i] instanceof BytesRef) { - sortValuesCopy[i] = new Text(new BytesArray((BytesRef) sortValues[i])); - } + public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) { + this.sortValues = Arrays.copyOf(sortValues, sortValues.length); + for (int i = 0; i < sortValues.length; ++i) { + if (this.sortValues[i] instanceof BytesRef) { + this.sortValues[i] = sortValueFormats[i].format((BytesRef) sortValues[i]); } } - this.sortValues = sortValuesCopy; } @Override @@ -618,8 +611,6 @@ public class InternalSearchHit implements SearchHit { sortValues[i] = in.readShort(); } else if (type == 8) { sortValues[i] = in.readBoolean(); - } else if (type == 9) { - sortValues[i] = in.readText(); } else { throw new IOException("Can't match type [" + type + "]"); } @@ -726,9 +717,6 @@ public class InternalSearchHit implements SearchHit { } else if (type == Boolean.class) { out.writeByte((byte) 8); out.writeBoolean((Boolean) sortValue); - } else if (sortValue instanceof Text) { - out.writeByte((byte) 9); - out.writeText((Text) sortValue); } else { throw new IOException("Can't handle sort field value of type [" + type + "]"); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 8b55c764bbb..550a5f76caf 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -59,6 +59,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.util.ArrayList; @@ -244,9 +245,9 @@ public abstract class SearchContext implements Releasable { public abstract Float minimumScore(); - public abstract SearchContext sort(Sort sort); + public abstract SearchContext sort(SortAndFormats sort); - public abstract Sort sort(); + public abstract SortAndFormats sort(); public abstract SearchContext trackScores(boolean trackScores); diff --git a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index be2c3798430..6cacf86d65f 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -19,19 +19,18 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.fetch.FetchSearchResult; -import 
org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; import java.util.ArrayList; @@ -48,7 +47,7 @@ public class SubSearchContext extends FilteredSearchContext { private int from; private int size = DEFAULT_SIZE; - private Sort sort; + private SortAndFormats sort; private ParsedQuery parsedQuery; private Query query; @@ -172,13 +171,13 @@ public class SubSearchContext extends FilteredSearchContext { } @Override - public SearchContext sort(Sort sort) { + public SearchContext sort(SortAndFormats sort) { this.sort = sort; return this; } @Override - public Sort sort() { + public SortAndFormats sort() { return sort; } diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 62210655a00..a29831f5a04 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.MinimumScoreCollector; import org.elasticsearch.common.lucene.search.FilteredCollector; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.SearchService; @@ -58,6 +59,7 @@ import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.profile.Profiler; import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.TrackScoresParseElement; import org.elasticsearch.search.suggest.SuggestPhase; @@ -119,7 +121,9 @@ public class QueryPhase implements SearchPhase { if (searchContext.hasOnlySuggest()) { suggestPhase.execute(searchContext); // TODO: fix this once we can fetch docs for suggestions - searchContext.queryResult().topDocs(new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, 0)); + searchContext.queryResult().topDocs( + new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, 0), + new DocValueFormat[0]); return; } // Pre-process aggregations as late as possible. 
In the case of a DFS_Q_T_F @@ -141,15 +145,15 @@ public class QueryPhase implements SearchPhase { } } - private static boolean returnsDocsInOrder(Query query, Sort sort) { - if (sort == null || Sort.RELEVANCE.equals(sort)) { + private static boolean returnsDocsInOrder(Query query, SortAndFormats sf) { + if (sf == null || Sort.RELEVANCE.equals(sf.sort)) { // sort by score // queries that return constant scores will return docs in index // order since Lucene tie-breaks on the doc id return query.getClass() == ConstantScoreQuery.class || query.getClass() == MatchAllDocsQuery.class; } else { - return Sort.INDEXORDER.equals(sort); + return Sort.INDEXORDER.equals(sf.sort); } } @@ -176,6 +180,7 @@ public class QueryPhase implements SearchPhase { Collector collector; Callable topDocsCallable; + DocValueFormat[] sortValueFormats = new DocValueFormat[0]; assert query == searcher.rewrite(query); // already rewritten @@ -229,8 +234,10 @@ public class QueryPhase implements SearchPhase { } assert numDocs > 0; if (searchContext.sort() != null) { - topDocsCollector = TopFieldCollector.create(searchContext.sort(), numDocs, + SortAndFormats sf = searchContext.sort(); + topDocsCollector = TopFieldCollector.create(sf.sort, numDocs, (FieldDoc) after, true, searchContext.trackScores(), searchContext.trackScores()); + sortValueFormats = sf.formats; } else { rescore = !searchContext.rescore().isEmpty(); for (RescoreSearchContext rescoreContext : searchContext.rescore()) { @@ -402,7 +409,7 @@ public class QueryPhase implements SearchPhase { queryResult.terminatedEarly(false); } - queryResult.topDocs(topDocsCallable.call()); + queryResult.topDocs(topDocsCallable.call(), sortValueFormats); if (searchContext.getProfilers() != null) { List shardResults = Profiler.buildShardResults(searchContext.getProfilers().getProfilers()); diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 2b82633ebfd..1408ebe8359 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -19,12 +19,14 @@ package org.elasticsearch.search.query; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -51,6 +53,7 @@ public class QuerySearchResult extends QuerySearchResultProvider { private int from; private int size; private TopDocs topDocs; + private DocValueFormat[] sortValueFormats; private InternalAggregations aggregations; private List pipelineAggregators; private Suggest suggest; @@ -112,8 +115,20 @@ public class QuerySearchResult extends QuerySearchResultProvider { return topDocs; } - public void topDocs(TopDocs topDocs) { + public void topDocs(TopDocs topDocs, DocValueFormat[] sortValueFormats) { this.topDocs = topDocs; + if (topDocs.scoreDocs.length > 0 && topDocs.scoreDocs[0] instanceof FieldDoc) { + int numFields = ((FieldDoc) topDocs.scoreDocs[0]).fields.length; + if (numFields != sortValueFormats.length) { + throw new 
IllegalArgumentException("The number of sort fields does not match: " + + numFields + " != " + sortValueFormats.length); + } + } + this.sortValueFormats = sortValueFormats; + } + + public DocValueFormat[] sortValueFormats() { + return sortValueFormats; } public Aggregations aggregations() { @@ -192,6 +207,15 @@ public class QuerySearchResult extends QuerySearchResultProvider { // shardTarget = readSearchShardTarget(in); from = in.readVInt(); size = in.readVInt(); + int numSortFieldsPlus1 = in.readVInt(); + if (numSortFieldsPlus1 == 0) { + sortValueFormats = null; + } else { + sortValueFormats = new DocValueFormat[numSortFieldsPlus1 - 1]; + for (int i = 0; i < sortValueFormats.length; ++i) { + sortValueFormats[i] = in.readNamedWriteable(DocValueFormat.class); + } + } topDocs = readTopDocs(in); if (in.readBoolean()) { aggregations = InternalAggregations.readAggregations(in); @@ -233,6 +257,14 @@ public class QuerySearchResult extends QuerySearchResultProvider { // shardTarget.writeTo(out); out.writeVInt(from); out.writeVInt(size); + if (sortValueFormats == null) { + out.writeVInt(0); + } else { + out.writeVInt(1 + sortValueFormats.length); + for (int i = 0; i < sortValueFormats.length; ++i) { + out.writeNamedWriteable(sortValueFormats[i]); + } + } writeTopDocs(out, topDocs); if (aggregations == null) { out.writeBoolean(false); diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/core/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index 732a80bad20..b82ed941e1c 100644 --- a/core/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/core/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -61,7 +61,7 @@ public class RescorePhase extends AbstractComponent implements SearchPhase { for (RescoreSearchContext ctx : context.rescore()) { topDocs = ctx.rescorer().rescore(topDocs, context, ctx); } - context.queryResult().topDocs(topDocs); + context.queryResult().topDocs(topDocs, context.queryResult().sortValueFormats()); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); } diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index 6d72efdf368..6ed4b0db5bc 100644 --- a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.searchafter; import org.apache.lucene.search.FieldDoc; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; @@ -36,6 +34,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; import java.util.ArrayList; @@ -104,21 +104,23 @@ public class SearchAfterBuilder implements ToXContent, Writeable { return Arrays.copyOf(sortValues, sortValues.length); } - public static FieldDoc buildFieldDoc(Sort sort, Object[] values) { - if (sort == null || sort.getSort() == null || 
sort.getSort().length == 0) { + public static FieldDoc buildFieldDoc(SortAndFormats sort, Object[] values) { + if (sort == null || sort.sort.getSort() == null || sort.sort.getSort().length == 0) { throw new IllegalArgumentException("Sort must contain at least one field."); } - SortField[] sortFields = sort.getSort(); + SortField[] sortFields = sort.sort.getSort(); if (sortFields.length != values.length) { throw new IllegalArgumentException( - SEARCH_AFTER.getPreferredName() + " has " + values.length + " value(s) but sort has " + sort.getSort().length + "."); + SEARCH_AFTER.getPreferredName() + " has " + values.length + " value(s) but sort has " + + sort.sort.getSort().length + "."); } Object[] fieldValues = new Object[sortFields.length]; for (int i = 0; i < sortFields.length; i++) { SortField sortField = sortFields[i]; + DocValueFormat format = sort.formats[i]; if (values[i] != null) { - fieldValues[i] = convertValueFromSortField(values[i], sortField); + fieldValues[i] = convertValueFromSortField(values[i], sortField, format); } else { fieldValues[i] = null; } @@ -130,15 +132,15 @@ public class SearchAfterBuilder implements ToXContent, Writeable { return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues); } - private static Object convertValueFromSortField(Object value, SortField sortField) { + private static Object convertValueFromSortField(Object value, SortField sortField, DocValueFormat format) { if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) { IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource(); - return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value); + return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value, format); } - return convertValueFromSortType(sortField.getField(), sortField.getType(), value); + return convertValueFromSortType(sortField.getField(), sortField.getType(), value, format); } - private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value) { + private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value, DocValueFormat format) { try { switch (sortType) { case DOC: @@ -179,7 +181,7 @@ public class SearchAfterBuilder implements ToXContent, Writeable { case STRING_VAL: case STRING: - return new BytesRef(value.toString()); + return format.parseBytesRef(value.toString()); default: throw new IllegalArgumentException("Comparator type [" + sortType.name() + "] for field [" + fieldName diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 70876f0acea..892673f890a 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; @@ -55,8 +56,10 @@ public class FieldSortBuilder extends SortBuilder { * special field name to sort by index order */ public static final String DOC_FIELD_NAME = "_doc"; - private static final SortField SORT_DOC = new SortField(null, 
SortField.Type.DOC); - private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true); + private static final SortFieldAndFormat SORT_DOC = new SortFieldAndFormat( + new SortField(null, SortField.Type.DOC), DocValueFormat.RAW); + private static final SortFieldAndFormat SORT_DOC_REVERSE = new SortFieldAndFormat( + new SortField(null, SortField.Type.DOC, true), DocValueFormat.RAW); private final String fieldName; @@ -246,7 +249,7 @@ public class FieldSortBuilder extends SortBuilder { } @Override - public SortField build(QueryShardContext context) throws IOException { + public SortFieldAndFormat build(QueryShardContext context) throws IOException { if (DOC_FIELD_NAME.equals(fieldName)) { if (order == SortOrder.DESC) { return SORT_DOC_REVERSE; @@ -281,7 +284,8 @@ public class FieldSortBuilder extends SortBuilder { } IndexFieldData.XFieldComparatorSource fieldComparatorSource = fieldData .comparatorSource(missing, localSortMode, nested); - return new SortField(fieldType.name(), fieldComparatorSource, reverse); + SortField field = new SortField(fieldType.name(), fieldComparatorSource, reverse); + return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); } } diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 491a37be8b6..dce9a7ec3fe 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; @@ -504,7 +505,7 @@ public class GeoDistanceSortBuilder extends SortBuilder } @Override - public SortField build(QueryShardContext context) throws IOException { + public SortFieldAndFormat build(QueryShardContext context) throws IOException { final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes List localPoints = new ArrayList(); @@ -585,7 +586,7 @@ public class GeoDistanceSortBuilder extends SortBuilder }; - return new SortField(fieldName, geoDistanceComparatorSource, reverse); + return new SortFieldAndFormat(new SortField(fieldName, geoDistanceComparatorSource, reverse), DocValueFormat.RAW); } static void parseGeoPoints(XContentParser parser, List geoPoints) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index 8267429f77c..5b9b139e495 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.DocValueFormat; import java.io.IOException; import java.util.Objects; @@ -40,8 +41,10 @@ public class 
ScoreSortBuilder extends SortBuilder { public static final String NAME = "_score"; public static final ParseField ORDER_FIELD = new ParseField("order"); - private static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE); - private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true); + private static final SortFieldAndFormat SORT_SCORE = new SortFieldAndFormat( + new SortField(null, SortField.Type.SCORE), DocValueFormat.RAW); + private static final SortFieldAndFormat SORT_SCORE_REVERSE = new SortFieldAndFormat( + new SortField(null, SortField.Type.SCORE, true), DocValueFormat.RAW); /** * Build a ScoreSortBuilder default to descending sort order. @@ -106,7 +109,7 @@ public class ScoreSortBuilder extends SortBuilder { } @Override - public SortField build(QueryShardContext context) { + public SortFieldAndFormat build(QueryShardContext context) { if (order == SortOrder.DESC) { return SORT_SCORE; } else { diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index c574fbcb7df..eeb418c0a97 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -52,6 +52,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import org.elasticsearch.script.SearchScript; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; @@ -302,7 +303,7 @@ public class ScriptSortBuilder extends SortBuilder { @Override - public SortField build(QueryShardContext context) throws IOException { + public SortFieldAndFormat build(QueryShardContext context) throws IOException { final SearchScript searchScript = context.getScriptService().search( context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap(), context.getClusterState()); @@ -366,7 +367,7 @@ public class ScriptSortBuilder extends SortBuilder { throw new QueryShardException(context, "custom script sort type [" + type + "] not supported"); } - return new SortField("_script", fieldComparatorSource, reverse); + return new SortFieldAndFormat(new SortField("_script", fieldComparatorSource, reverse), DocValueFormat.RAW); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortAndFormats.java b/core/src/main/java/org/elasticsearch/search/sort/SortAndFormats.java new file mode 100644 index 00000000000..21a9d112fea --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/sort/SortAndFormats.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.sort; + +import org.apache.lucene.search.Sort; +import org.elasticsearch.search.DocValueFormat; + +public final class SortAndFormats { + + public final Sort sort; + public final DocValueFormat[] formats; + + public SortAndFormats(Sort sort, DocValueFormat[] formats) { + if (sort.getSort().length != formats.length) { + throw new IllegalArgumentException("Number of sort field mismatch: " + + sort.getSort().length + " != " + formats.length); + } + this.sort = sort; + this.formats = formats; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 8cd3fb82413..7fb0baf6548 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.search.DocValueFormat; import java.io.IOException; import java.util.ArrayList; @@ -65,9 +66,9 @@ public abstract class SortBuilder> extends ToXContentTo } /** - * Create a @link {@link SortField} from this builder. + * Create a @link {@link SortFieldAndFormat} from this builder. */ - protected abstract SortField build(QueryShardContext context) throws IOException; + protected abstract SortFieldAndFormat build(QueryShardContext context) throws IOException; /** * Set the order of sorting. @@ -143,10 +144,13 @@ public abstract class SortBuilder> extends ToXContentTo } } - public static Optional buildSort(List> sortBuilders, QueryShardContext context) throws IOException { + public static Optional buildSort(List> sortBuilders, QueryShardContext context) throws IOException { List sortFields = new ArrayList<>(sortBuilders.size()); + List sortFormats = new ArrayList<>(sortBuilders.size()); for (SortBuilder builder : sortBuilders) { - sortFields.add(builder.build(context)); + SortFieldAndFormat sf = builder.build(context); + sortFields.add(sf.field); + sortFormats.add(sf.format); } if (!sortFields.isEmpty()) { // optimize if we just sort on score non reversed, we don't really @@ -163,7 +167,9 @@ public abstract class SortBuilder> extends ToXContentTo } } if (sort) { - return Optional.of(new Sort(sortFields.toArray(new SortField[sortFields.size()]))); + return Optional.of(new SortAndFormats( + new Sort(sortFields.toArray(new SortField[sortFields.size()])), + sortFormats.toArray(new DocValueFormat[sortFormats.size()]))); } } return Optional.empty(); diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java b/core/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java new file mode 100644 index 00000000000..f9756b79068 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/sort/SortFieldAndFormat.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.sort; + +import java.util.Objects; + +import org.apache.lucene.search.SortField; +import org.elasticsearch.search.DocValueFormat; + +public final class SortFieldAndFormat { + + public final SortField field; + public final DocValueFormat format; + + public SortFieldAndFormat(SortField field, DocValueFormat format) { + this.field = Objects.requireNonNull(field); + this.format = Objects.requireNonNull(format); + } + +} diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java index d62e6ac7dba..4f08eee9b77 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java @@ -100,4 +100,5 @@ public class NetworkAddressTests extends ESTestCase { byte bytes[] = InetAddress.getByName(address).getAddress(); return Inet6Address.getByAddress(hostname, bytes, scopeid); } + } diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index 35945f21209..2df15592f45 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -140,8 +140,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase DocValueFormat.RAW.parseLong("", randomBoolean(), null)); + expectThrows(IllegalArgumentException.class, () -> DocValueFormat.RAW.parseLong("abc", randomBoolean(), null)); + + assertEquals(-1d, DocValueFormat.RAW.parseDouble("-1", randomBoolean(), null), 0d); + assertEquals(1d, DocValueFormat.RAW.parseDouble("1", randomBoolean(), null), 0d); + assertEquals(.5, DocValueFormat.RAW.parseDouble("0.5", randomBoolean(), null), 0d); + // not checking exception messages as they could depend on the JVM + expectThrows(IllegalArgumentException.class, () -> DocValueFormat.RAW.parseLong("", randomBoolean(), null)); + expectThrows(IllegalArgumentException.class, () -> DocValueFormat.RAW.parseLong("abc", randomBoolean(), null)); + + assertEquals(new BytesRef("abc"), DocValueFormat.RAW.parseBytesRef("abc")); + } + + public void testBooleanParse() { + assertEquals(0L, DocValueFormat.BOOLEAN.parseLong("false", randomBoolean(), null)); + assertEquals(1L, DocValueFormat.BOOLEAN.parseLong("true", randomBoolean(), null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> DocValueFormat.BOOLEAN.parseLong("", randomBoolean(), null)); + assertEquals("Cannot parse boolean [], expected either [true] or [false]", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> DocValueFormat.BOOLEAN.parseLong("0", randomBoolean(), null)); + assertEquals("Cannot parse boolean [0], expected either [true] or [false]", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> DocValueFormat.BOOLEAN.parseLong("False", randomBoolean(), null)); + 
assertEquals("Cannot parse boolean [False], expected either [true] or [false]", e.getMessage()); + } + + public void testIPParse() { + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.1.7"))), + DocValueFormat.IP.parseBytesRef("192.168.1.7")); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), + DocValueFormat.IP.parseBytesRef("::1")); + } } diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 13c99944959..08c661a130b 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchContextException; import org.elasticsearch.search.SearchHit; @@ -189,11 +188,11 @@ public class SearchAfterIT extends ESIntegTestCase { values.add(randomDouble()); break; case 6: - values.add(new Text(randomAsciiOfLengthBetween(5, 20))); + values.add(randomAsciiOfLengthBetween(5, 20)); break; } } - values.add(new Text(UUIDs.randomBase64UUID())); + values.add(UUIDs.randomBase64UUID()); documents.add(values); } int reqSize = randomInt(NUM_DOCS-1); @@ -296,7 +295,7 @@ public class SearchAfterIT extends ESIntegTestCase { } else if (type == Boolean.class) { mappings.add("field" + Integer.toString(i)); mappings.add("type=boolean"); - } else if (types.get(i) instanceof Text) { + } else if (types.get(i) instanceof String) { mappings.add("field" + Integer.toString(i)); mappings.add("type=keyword"); } else { diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 2978c5dae7a..01a8a07d5bb 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -62,6 +62,7 @@ import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptServiceTests.TestEngineService; import org.elasticsearch.script.ScriptSettings; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -163,12 +164,12 @@ public abstract class AbstractSortTestCase> extends EST QueryShardContext mockShardContext = createMockShardContext(); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { T sortBuilder = createTestItem(); - SortField sortField = sortBuilder.build(mockShardContext); - sortFieldAssertions(sortBuilder, sortField); + SortFieldAndFormat sortField = sortBuilder.build(mockShardContext); + sortFieldAssertions(sortBuilder, sortField.field, sortField.format); } } - protected abstract void sortFieldAssertions(T builder, SortField sortField) throws IOException; + protected abstract void sortFieldAssertions(T builder, SortField sortField, DocValueFormat format) throws IOException; /** * Test serialization and deserialization of the test sort. 
diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 4b6eb82304a..baaf3ac5d3c 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.search.DocValueFormat; import java.io.IOException; import java.util.Arrays; @@ -110,7 +111,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase` or `bytes[]` have been removed in favor of providing the diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 60a660ead42..5cac904df35 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -18,16 +18,13 @@ */ package org.elasticsearch.test; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -65,13 +62,10 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.threadpool.ThreadPool; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - public class TestSearchContext extends SearchContext { final PageCacheRecycler pageCacheRecycler; @@ -365,12 +359,12 @@ public class TestSearchContext extends SearchContext { } @Override - public SearchContext sort(Sort sort) { + public SearchContext sort(SortAndFormats sort) { return null; } @Override - public Sort sort() { + public SortAndFormats sort() { return null; } From b91df36a627be0f1c78d61b68fe017f2976b8f6e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 2 May 2016 17:17:00 +0200 Subject: [PATCH 0098/1311] Fix and test handling of `null_value`. #18090 This was mostly untested and had some bugs. 
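In practical terms: with no null_value configured, an explicit JSON null now indexes nothing for the field, and with one configured, the substitute value is indexed in its place. A condensed form of the behavior the new IpFieldMapperTests case below pins down; parser here is the test's DocumentMapperParser fixture, and imports are as in those tests:

    String mapping = XContentFactory.jsonBuilder().startObject()
            .startObject("type").startObject("properties").startObject("field")
                .field("type", "ip")
                .field("null_value", "::1") // substituted on explicit JSON null
            .endObject().endObject().endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject().nullField("field").endObject().bytes());
    // Without null_value this document would produce no fields at all;
    // with it, both the point field and the doc-values field are present.
    assertEquals(2, doc.rootDoc().getFields("field").length);
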
Closes #18085 --- .../index/mapper/core/DateFieldMapper.java | 10 ++- .../index/mapper/core/NumberFieldMapper.java | 37 ++++++++++- .../index/mapper/ip/IpFieldMapper.java | 6 +- .../mapper/core/DateFieldMapperTests.java | 51 ++++++++++++++++ .../mapper/core/KeywordFieldMapperTests.java | 18 +++++- .../mapper/core/NumberFieldMapperTests.java | 61 +++++++++++++++++++ .../mapper/core/NumberFieldTypeTests.java | 31 ++++++++++ .../index/mapper/ip/IpFieldMapperTests.java | 52 ++++++++++++++++ 8 files changed, 258 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 81977fe6caf..89e2d8409d5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.LocaleUtils; @@ -152,7 +151,7 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu if (propNode == null) { throw new MapperParsingException("Property [null_value] cannot be null."); } - builder.nullValue(InetAddresses.forString(propNode.toString())); + builder.nullValue(propNode.toString()); iterator.remove(); } else if (propName.equals("ignore_malformed")) { builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); @@ -561,7 +560,7 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu dateAsString = dateAsObject.toString(); } } else { - dateAsString = context.parser().text(); + dateAsString = context.parser().textOrNull(); } if (dateAsString == null) { @@ -615,6 +614,11 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu if (includeDefaults || ignoreMalformed.explicit()) { builder.field("ignore_malformed", ignoreMalformed.value()); } + + if (includeDefaults || fieldType().nullValue() != null) { + builder.field("null_value", fieldType().nullValueAsString()); + } + if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 2c3a9a5c83e..f1088652988 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -366,8 +366,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc BYTE("byte", NumericType.BYTE) { @Override Byte parse(Object value) { - if (value instanceof Byte) { - return (Byte) value; + if (value instanceof Number) { + double doubleValue = ((Number) value).doubleValue(); + if (doubleValue < Byte.MIN_VALUE || doubleValue > Byte.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a byte"); + } + if (doubleValue % 1 != 0) { + throw new IllegalArgumentException("Value [" + value + "] has a decimal part"); + } + return ((Number) value).byteValue(); } 
if (value instanceof BytesRef) { value = ((BytesRef) value).utf8ToString(); @@ -426,6 +433,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override Short parse(Object value) { if (value instanceof Number) { + double doubleValue = ((Number) value).doubleValue(); + if (doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a short"); + } + if (doubleValue % 1 != 0) { + throw new IllegalArgumentException("Value [" + value + "] has a decimal part"); + } return ((Number) value).shortValue(); } if (value instanceof BytesRef) { @@ -485,6 +499,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override Integer parse(Object value) { if (value instanceof Number) { + double doubleValue = ((Number) value).doubleValue(); + if (doubleValue < Integer.MIN_VALUE || doubleValue > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for an integer"); + } + if (doubleValue % 1 != 0) { + throw new IllegalArgumentException("Value [" + value + "] has a decimal part"); + } return ((Number) value).intValue(); } if (value instanceof BytesRef) { @@ -581,6 +602,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override Long parse(Object value) { if (value instanceof Number) { + double doubleValue = ((Number) value).doubleValue(); + if (doubleValue < Long.MIN_VALUE || doubleValue > Long.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a long"); + } + if (doubleValue % 1 != 0) { + throw new IllegalArgumentException("Value [" + value + "] has a decimal part"); + } return ((Number) value).longValue(); } if (value instanceof BytesRef) { @@ -944,6 +972,11 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc if (includeDefaults || coerce.explicit()) { builder.field("coerce", coerce.value()); } + + if (includeDefaults || fieldType().nullValue() != null) { + builder.field("null_value", fieldType().nullValue()); + } + if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 6cb3cbe46c8..5f5a3d85fe1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -339,7 +339,7 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include if (context.externalValueSet()) { addressAsObject = context.externalValue(); } else { - addressAsObject = context.parser().text(); + addressAsObject = context.parser().textOrNull(); } if (addressAsObject == null) { @@ -395,6 +395,10 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); + if (includeDefaults || fieldType().nullValue() != null) { + builder.field("null_value", InetAddresses.toAddrString((InetAddress) fieldType().nullValue())); + } + if (includeDefaults || ignoreMalformed.explicit()) { builder.field("ignore_malformed", ignoreMalformed.value()); } diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java index 9f09e3e2e13..ee19d094a3f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java @@ -251,4 +251,55 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); } + + public void testNullValue() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "date") + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); + + mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "date") + .field("null_value", "2016-03-11") + .endObject() + .endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(8, pointField.fieldType().pointNumBytes()); + assertFalse(pointField.fieldType().stored()); + assertEquals(1457654400000L, pointField.numericValue().longValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertEquals(1457654400000L, dvField.numericValue().longValue()); + assertFalse(dvField.fieldType().stored()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java index 28867ed1f73..8b9c9134fa3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/KeywordFieldMapperTests.java @@ -126,14 +126,28 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { public void testNullValue() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject() + .startObject("properties").startObject("field").field("type", "keyword").endObject().endObject() .endObject().endObject().string(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); + 
+ mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .endObject() .bytes()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java index 0f0f5a33213..45cc09fff0e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java @@ -316,4 +316,65 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); } } + + public void testNullValue() throws IOException { + for (String type : TYPES) { + doTestNullValue(type); + } + } + + private void doTestNullValue(String type) throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", type) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); + + Object missing; + if (Arrays.asList("float", "double").contains(type)) { + missing = 123d; + } else { + missing = 123L; + } + mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", type) + .field("null_value", missing) + .endObject() + .endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + assertEquals(123, pointField.numericValue().doubleValue(), 0d); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertFalse(dvField.fieldType().stored()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java index 25591591e42..22dec5bdde5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java @@ -75,4 +75,35 @@ public class NumberFieldTypeTests extends FieldTypeTestCase { () -> ft.rangeQuery("1", "3", true, true)); assertEquals("Cannot search on 
field [field] since it is not indexed.", e.getMessage()); } + + public void testConversions() { + assertEquals((byte) 3, NumberType.BYTE.parse(3d)); + assertEquals((short) 3, NumberType.SHORT.parse(3d)); + assertEquals(3, NumberType.INTEGER.parse(3d)); + assertEquals(3L, NumberType.LONG.parse(3d)); + assertEquals(3f, NumberType.FLOAT.parse(3d)); + assertEquals(3d, NumberType.DOUBLE.parse(3d)); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> NumberType.BYTE.parse(3.5)); + assertEquals("Value [3.5] has a decimal part", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.SHORT.parse(3.5)); + assertEquals("Value [3.5] has a decimal part", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.INTEGER.parse(3.5)); + assertEquals("Value [3.5] has a decimal part", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.LONG.parse(3.5)); + assertEquals("Value [3.5] has a decimal part", e.getMessage()); + assertEquals(3.5f, NumberType.FLOAT.parse(3.5)); + assertEquals(3.5d, NumberType.DOUBLE.parse(3.5)); + + e = expectThrows(IllegalArgumentException.class, () -> NumberType.BYTE.parse(128)); + assertEquals("Value [128] is out of range for a byte", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.SHORT.parse(65536)); + assertEquals("Value [65536] is out of range for a short", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.INTEGER.parse(2147483648L)); + assertEquals("Value [2147483648] is out of range for an integer", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, () -> NumberType.LONG.parse(10000000000000000000d)); + assertEquals("Value [1.0E19] is out of range for a long", e.getMessage()); + assertEquals(1.1f, NumberType.FLOAT.parse(1.1)); // accuracy loss is expected + assertEquals(1.1d, NumberType.DOUBLE.parse(1.1)); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java index 3bb96cce31e..25979c5a632 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java @@ -36,6 +36,7 @@ import org.junit.Before; import static org.hamcrest.Matchers.containsString; +import java.io.IOException; import java.net.InetAddress; public class IpFieldMapperTests extends ESSingleNodeTestCase { @@ -217,4 +218,55 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { fields = doc.rootDoc().getFields("_all"); assertEquals(0, fields.length); } + + public void testNullValue() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "ip") + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); + + mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "ip") + 
.field("null_value", "::1") + .endObject() + .endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(16, pointField.fieldType().pointNumBytes()); + assertFalse(pointField.fieldType().stored()); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), pointField.binaryValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType()); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), dvField.binaryValue()); + assertFalse(dvField.fieldType().stored()); + } } From 4ddf916aabaf6e60e68f0cf59ef5dbc3c17cffed Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 6 May 2016 10:40:19 +0200 Subject: [PATCH 0099/1311] Removed scripting docs for docs[field].multiValued Closes #18164 --- docs/reference/modules/scripting/groovy.asciidoc | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/reference/modules/scripting/groovy.asciidoc b/docs/reference/modules/scripting/groovy.asciidoc index 60b64e0131a..0c7f4676d2c 100644 --- a/docs/reference/modules/scripting/groovy.asciidoc +++ b/docs/reference/modules/scripting/groovy.asciidoc @@ -31,9 +31,6 @@ on the underlying field type): `doc['field_name'].empty`:: A boolean indicating if the field has no values within the doc. -`doc['field_name'].multiValued`:: - A boolean indicating that the field has several values within the corpus. - `doc['field_name'].lat`:: The latitude of a geo point type, or `null`. From 7d14728960f4b0678f9970b0aff4d4f7fbca8459 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 May 2016 19:30:47 +0200 Subject: [PATCH 0100/1311] Add xContent shuffling to some more tests This adds some random shuffling of xContent to some more test cases. 
Relates to #5831 --- .../common/xcontent/XContentBuilder.java | 4 ++++ .../common/xcontent/XContentGenerator.java | 3 ++- .../xcontent/json/JsonXContentGenerator.java | 5 +++++ .../AbstractShapeBuilderTestCase.java | 4 +--- .../ConstructingObjectParserTests.java | 3 +-- .../index/query/AbstractQueryTestCase.java | 6 +++--- .../index/query/InnerHitBuilderTests.java | 3 +-- .../query/PercolateQueryBuilderTests.java | 7 ++----- .../snapshots/blobstore/FileInfoTests.java | 3 +-- .../ingest/IngestMetadataTests.java | 3 +-- .../script/ScriptMetaDataTests.java | 4 +--- .../aggregations/BaseAggregationTestCase.java | 2 +- .../BasePipelineAggregationTestCase.java | 2 +- .../highlight/HighlightBuilderTests.java | 3 +-- .../rescore/QueryRescoreBuilderTests.java | 4 +--- .../searchafter/SearchAfterBuilderTests.java | 5 ++--- .../search/sort/AbstractSortTestCase.java | 2 +- .../AbstractSuggestionBuilderTestCase.java | 7 ++----- .../CompletionSuggesterBuilderTests.java | 7 ++----- .../phrase/DirectCandidateGeneratorTests.java | 4 +--- .../phrase/SmoothingModelTestCase.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 20 +++++++++++-------- .../test/test/ESTestCaseTests.java | 4 +--- 23 files changed, 48 insertions(+), 59 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 4367b15fd52..3fa182e3727 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -113,6 +113,10 @@ public final class XContentBuilder implements BytesStream, Releasable { return this; } + public boolean isPrettyPrint() { + return generator.isPrettyPrint(); + } + public XContentBuilder lfAtEnd() { generator.usePrintLineFeedAtEnd(); return this; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java index f23ae441989..a2cceae8367 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.bytes.BytesReference; - import java.io.Closeable; import java.io.IOException; import java.io.InputStream; @@ -34,6 +33,8 @@ public interface XContentGenerator extends Closeable { void usePrettyPrint(); + boolean isPrettyPrint(); + void usePrintLineFeedAtEnd(); void writeStartArray() throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 4148148e507..e74ab01bd6a 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -104,6 +104,11 @@ public class JsonXContentGenerator implements XContentGenerator { prettyPrint = true; } + @Override + public boolean isPrettyPrint() { + return this.prettyPrint; + } + @Override public void usePrintLineFeedAtEnd() { writeLineFeedAtEnd = true; diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 79b890ab401..9cbd4bb769d 100644 --- 
a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -34,8 +34,6 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; -import java.util.Collections; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -81,7 +79,7 @@ public abstract class AbstractShapeBuilderTestCase exte contentBuilder.prettyPrint(); } XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); - XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + XContentBuilder shuffled = shuffleXContent(builder); XContentParser shapeParser = XContentHelper.createParser(shuffled.bytes()); shapeParser.nextToken(); ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser); diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index ff23a822832..e04fdab12aa 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import static java.util.Collections.emptySet; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.hamcrest.Matchers.instanceOf; @@ -48,7 +47,7 @@ public class ConstructingObjectParserTests extends ESTestCase { expected.setD(randomBoolean()); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); expected.toXContent(builder, ToXContent.EMPTY_PARAMS); - builder = shuffleXContent(builder, emptySet()); + builder = shuffleXContent(builder); BytesReference bytes = builder.bytes(); XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes); try { diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index c2a5fe5aacd..078809b43f5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -426,11 +426,11 @@ public abstract class AbstractQueryTestCase> } /** - * Subclasses can override this method and return a set of fields which should be protected from + * Subclasses can override this method and return an array of fieldnames which should be protected from * recursive random shuffling in the {@link #testFromXContent()} test case */ - protected Set shuffleProtectedFields() { - return Collections.emptySet(); + protected String[] shuffleProtectedFields() { + return new String[0]; } protected static XContentBuilder toXContent(QueryBuilder query, XContentType contentType) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index c7106a7ccf6..676671f066e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -24,7 +24,6 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; import java.io.IOException; import 
java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -93,7 +92,7 @@ public class InnerHitBuilderTests extends ESTestCase { InnerHitBuilder innerHit = randomInnerHits(true, false); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + XContentBuilder shuffled = shuffleXContent(builder); if (randomBoolean()) { shuffled.prettyPrint(); } diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java index 9dc0bdbd133..690b2c03a2f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java @@ -42,15 +42,12 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; -import java.util.Set; - import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class PercolateQueryBuilderTests extends AbstractQueryTestCase { - private static final Set SHUFFLE_PROTECTED_FIELDS = - Collections.singleton(PercolateQueryBuilder.DOCUMENT_FIELD.getPreferredName()); + private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] { PercolateQueryBuilder.DOCUMENT_FIELD.getPreferredName()}; private static String queryField; private static String docType; @@ -105,7 +102,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase shuffleProtectedFields() { + protected String[] shuffleProtectedFields() { return SHUFFLE_PROTECTED_FIELDS; } diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 1a31df45575..67c431135a0 100644 --- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.test.ESTestCase; import java.io.IOException; - import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -53,7 +52,7 @@ public class FileInfoTests extends ESTestCase { BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS); - byte[] xcontent = builder.bytes().toBytes(); + byte[] xcontent = shuffleXContent(builder).bytes().toBytes(); final BlobStoreIndexShardSnapshot.FileInfo parsedInfo; try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xcontent)) { diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index 38c1684b7ff..7924d069f2e 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -57,7 +56,7 @@ public class IngestMetadataTests extends ESTestCase { builder.startObject(); ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + XContentBuilder shuffled = shuffleXContent(builder); final XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes()); MetaData.Custom custom = ingestMetadata.fromXContent(parser); assertTrue(custom instanceof IngestMetadata); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index 86c3fe561d6..c934de2dd72 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -34,8 +34,6 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; -import java.util.Map; public class ScriptMetaDataTests extends ESTestCase { @@ -79,7 +77,7 @@ public class ScriptMetaDataTests extends ESTestCase { xContentBuilder.startObject(); expected.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.emptyMap())); xContentBuilder.endObject(); - xContentBuilder = shuffleXContent(xContentBuilder, Collections.emptySet()); + xContentBuilder = shuffleXContent(xContentBuilder); XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes()); parser.nextToken(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 329b90058e4..4af87c982e1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -223,7 +223,7 @@ public abstract class BaseAggregationTestCase> builder.prettyPrint(); } factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + XContentBuilder shuffled = shuffleXContent(builder); XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes()); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index daee1782f4b..2acbd919484 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -225,7 +225,7 @@ public abstract class BasePipelineAggregationTestCase> extends EST builder.prettyPrint(); } testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet()); + XContentBuilder shuffled = shuffleXContent(builder); XContentParser itemParser = XContentHelper.createParser(shuffled.bytes()); itemParser.nextToken(); diff --git 
a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index ed1bc323557..1f4030f487c 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -39,9 +39,6 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; -import java.util.Collections; -import java.util.Set; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -173,8 +170,8 @@ public abstract class AbstractSuggestionBuilderTestCase shuffleProtectedFields() { - return Collections.emptySet(); + protected String[] shuffleProtectedFields() { + return new String[0]; } private SB mutate(SB firstBuilder) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index af728647212..04412d47065 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -37,14 +37,11 @@ import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Set; - import static org.hamcrest.Matchers.containsString; public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { - private static final Set SHUFFLE_PROTECTED_FIELDS = - Collections.singleton(CompletionSuggestionBuilder.CONTEXTS_FIELD.getPreferredName()); + private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] {CompletionSuggestionBuilder.CONTEXTS_FIELD.getPreferredName()}; @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { @@ -113,7 +110,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe * the equals() test will fail because their {@link BytesReference} representation isn't the same */ @Override - protected Set shuffleProtectedFields() { + protected String[] shuffleProtectedFields() { return SHUFFLE_PROTECTED_FIELDS; } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index a3a5ea01c33..3fd3850b98a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCan import org.elasticsearch.test.ESTestCase; import java.io.IOException; - import static org.hamcrest.Matchers.equalTo; public class DirectCandidateGeneratorTests extends ESTestCase{ @@ -117,8 +116,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ builder.prettyPrint(); } generator.toXContent(builder, ToXContent.EMPTY_PARAMS); - - XContentParser parser = XContentHelper.createParser(builder.bytes()); + XContentParser parser = XContentHelper.createParser(shuffleXContent(builder).bytes()); QueryParseContext context = new QueryParseContext(mockRegistry, parser, ParseFieldMatcher.STRICT); parser.nextToken(); 
DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.fromXContent(context); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index 1731ddd7c4f..f167eefa43d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -100,7 +100,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { contentBuilder.startObject(); testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - XContentParser parser = XContentHelper.createParser(contentBuilder.bytes()); + XContentParser parser = XContentHelper.createParser(shuffleXContent(contentBuilder).bytes()); QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT); parser.nextToken(); // go to start token, real parsing would do that in the outer element parser SmoothingModel parsedModel = fromXContent(context); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 01f0d5b8151..3dbbf25e202 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -414,7 +414,7 @@ public abstract class ESTestCase extends LuceneTestCase { if (input != null) { return randomValueOtherThanMany(input::equals, randomSupplier); } - + return(randomSupplier.get()); } @@ -634,25 +634,29 @@ public abstract class ESTestCase extends LuceneTestCase { * recursive shuffling behavior can be made by passing in the names of fields which * internally should stay untouched. */ - public static XContentBuilder shuffleXContent(XContentBuilder builder, Set exceptFieldNames) throws IOException { + public static XContentBuilder shuffleXContent(XContentBuilder builder, String... 
exceptFieldNames) throws IOException {
        BytesReference bytes = builder.bytes();
        XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes);
        // use ordered maps for reproducibility
-        Map shuffledMap = shuffleMap(parser.mapOrdered(), exceptFieldNames);
-        XContentBuilder jsonBuilder = XContentFactory.contentBuilder(builder.contentType());
-        return jsonBuilder.map(shuffledMap);
+        Map shuffledMap = shuffleMap(parser.mapOrdered(), new HashSet<>(Arrays.asList(exceptFieldNames)));
+        XContentBuilder xContentBuilder = XContentFactory.contentBuilder(builder.contentType());
+        if (builder.isPrettyPrint()) {
+            xContentBuilder.prettyPrint();
+        }
+        return xContentBuilder.map(shuffledMap);
     }

-    private static Map shuffleMap(Map map, Set exceptFieldNames) {
+    private static Map shuffleMap(Map map, Set exceptFields) {
         List keys = new ArrayList<>(map.keySet());
+        // even though we shuffle later, we need this to make tests reproduce on different jvms
         Collections.sort(keys);
         Map targetMap = new TreeMap<>();
         Collections.shuffle(keys, random());
         for (String key : keys) {
             Object value = map.get(key);
-            if (value instanceof Map && exceptFieldNames.contains(key) == false) {
-                targetMap.put(key, shuffleMap((Map) value, exceptFieldNames));
+            if (value instanceof Map && exceptFields.contains(key) == false) {
+                targetMap.put(key, shuffleMap((Map) value, exceptFields));
             } else {
                 targetMap.put(key, value);
             }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java
index 714354cc24c..005cd643480 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java
@@ -29,11 +29,9 @@ import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;

 import static org.hamcrest.Matchers.greaterThan;
@@ -71,7 +69,7 @@ public class ESTestCaseTests extends ESTestCase {
         Map randomStringObjectMap = randomStringObjectMap(5);
         XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
         builder.map(randomStringObjectMap);
-        XContentBuilder shuffleXContent = shuffleXContent(builder, Collections.emptySet());
+        XContentBuilder shuffleXContent = shuffleXContent(builder);
         XContentParser parser = XContentFactory.xContent(shuffleXContent.bytes()).createParser(shuffleXContent.bytes());
         Map resultMap = parser.map();
         assertEquals("both maps should contain the same mappings", randomStringObjectMap, resultMap);

From e88ac116335d9217d5b9b13ca6bb2575c6d52611 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Fri, 6 May 2016 09:34:57 +0200
Subject: [PATCH 0101/1311] Add back Version.V_5_0_0. #18176

This was lost when releasing alpha2 since the version constant got renamed.
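For readers tracing the arithmetic behind these constants: the new ID below, 5000099, follows the same packed-integer scheme the alpha IDs (5000001, 5000002) already imply, namely major * 1,000,000 + minor * 10,000 + revision * 100 + build, with build 99 conventionally marking a GA release. A minimal decoding sketch under that assumption — an illustration, not the actual Version class:

    // Decode an Elasticsearch-style version ID into its components.
    int id = 5_000_099;                  // V_5_0_0_ID from the diff below
    int major = id / 1_000_000;          // 5
    int minor = (id / 10_000) % 100;     // 0
    int revision = (id / 100) % 100;     // 0
    int build = id % 100;                // 99 => GA; 01 and 02 were alpha1/alpha2
    System.out.println(major + "." + minor + "." + revision + " (build " + build + ")");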
--- buildSrc/version.properties | 2 +- core/src/main/java/org/elasticsearch/Version.java | 6 +++++- qa/backwards-5.0/build.gradle | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 0f6a09327d6..fee8404080a 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,4 +1,4 @@ -elasticsearch = 5.0.0-alpha2 +elasticsearch = 5.0.0 lucene = 6.0.0 # optional dependencies diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 56d245ddc51..0e869d06149 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -73,7 +73,9 @@ public class Version { public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); public static final int V_5_0_0_alpha2_ID = 5000002; public static final Version V_5_0_0_alpha2 = new Version(V_5_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); - public static final Version CURRENT = V_5_0_0_alpha2; + public static final int V_5_0_0_ID = 5000099; + public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); + public static final Version CURRENT = V_5_0_0; static { assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to [" @@ -86,6 +88,8 @@ public class Version { public static Version fromId(int id) { switch (id) { + case V_5_0_0_ID: + return V_5_0_0; case V_5_0_0_alpha2_ID: return V_5_0_0_alpha2; case V_5_0_0_alpha1_ID: diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index 164f0e53d52..93d361c989c 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -18,6 +18,6 @@ integTest { cluster { numNodes = 2 numBwcNodes = 1 - bwcVersion = "5.0.0-alpha2-SNAPSHOT" // this is the same as the current version until we released the first RC + bwcVersion = "5.0.0-SNAPSHOT" // this is the same as the current version until we released the first RC } } From 93567a2f1b3c93f7ede97da6fa6fb6a2657aaa5e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 6 May 2016 12:51:11 +0200 Subject: [PATCH 0102/1311] Remove StringBuilder reuse for uid creation. #18181 This would be better handled by escape analysis. 
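A note on the optimization this message leans on: an expression like type + DELIMITER + id compiles to an ephemeral StringBuilder that never escapes the method, so the JIT's escape analysis can stack-allocate or eliminate it entirely, and the hand-reused builder threaded through ParseContext buys nothing. A rough sketch of the desugaring — illustrative only; the '#' delimiter value and the exact generated code are assumptions, not taken from this patch:

    // Roughly what javac emits for `return type + DELIMITER + id;`.
    // The builder is method-local and never escapes, so the JIT may
    // scalar-replace it and skip the heap allocation altogether.
    public static String createUid(String type, String id) {
        StringBuilder sb = new StringBuilder();
        sb.append(type);
        sb.append('#'); // assuming '#' is the uid delimiter ("type#id")
        sb.append(id);
        return sb.toString();
    }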
--- .../index/mapper/ParseContext.java | 23 ------------------- .../org/elasticsearch/index/mapper/Uid.java | 13 +---------- .../mapper/internal/ParentFieldMapper.java | 3 +-- .../index/mapper/internal/UidFieldMapper.java | 3 +-- 4 files changed, 3 insertions(+), 39 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index 931fbebede2..33476b48cc3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -326,11 +326,6 @@ public abstract class ParseContext { return in.externalValue(); } - @Override - public StringBuilder stringBuilder() { - return in.stringBuilder(); - } - @Override public void addDynamicMapper(Mapper update) { in.addDynamicMapper(update); @@ -366,8 +361,6 @@ public abstract class ParseContext { private Field uid, version; - private StringBuilder stringBuilder = new StringBuilder(); - private AllEntries allEntries = new AllEntries(); private List dynamicMappers = new ArrayList<>(); @@ -526,16 +519,6 @@ public abstract class ParseContext { return this.allEntries; } - /** - * A string builder that can be used to construct complex names for example. - * Its better to reuse the. - */ - @Override - public StringBuilder stringBuilder() { - stringBuilder.setLength(0); - return this.stringBuilder; - } - @Override public void addDynamicMapper(Mapper mapper) { dynamicMappers.add(mapper); @@ -736,12 +719,6 @@ public abstract class ParseContext { return clazz.cast(externalValue()); } - /** - * A string builder that can be used to construct complex names for example. - * Its better to reuse the. - */ - public abstract StringBuilder stringBuilder(); - /** * Add a new mapper dynamically created while parsing. 
*/ diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Uid.java b/core/src/main/java/org/elasticsearch/index/mapper/Uid.java index 414a00d8eed..70acbcd82c3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Uid.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Uid.java @@ -123,13 +123,6 @@ public final class Uid { return ref; } - public static BytesRef createUidAsBytes(BytesRef type, BytesRef id, BytesRefBuilder spare) { - spare.copyBytes(type); - spare.append(DELIMITER_BYTES); - spare.append(id); - return spare.get(); - } - public static BytesRef[] createUidsForTypesAndId(Collection types, Object id) { return createUidsForTypesAndIds(types, Collections.singletonList(id)); } @@ -149,11 +142,7 @@ public final class Uid { } public static String createUid(String type, String id) { - return createUid(new StringBuilder(), type, id); - } - - public static String createUid(StringBuilder sb, String type, String id) { - return sb.append(type).append(DELIMITER).append(id).toString(); + return type + DELIMITER + id; } public static boolean hasDelimiter(BytesRef uid) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 9473297de6f..2bf4f75f417 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -264,7 +263,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { } // we did not add it in the parsing phase, add it now fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId))); - } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) { + } else if (parentId != null && !parsedParentId.equals(Uid.createUid(parentType, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index e150a8cd10f..b22369ef812 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -23,7 +23,6 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Term; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -177,7 +176,7 @@ public class UidFieldMapper extends MetadataFieldMapper { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - Field uid = new Field(NAME, Uid.createUid(context.stringBuilder(), context.type(), context.id()), 
Defaults.FIELD_TYPE); + Field uid = new Field(NAME, Uid.createUid(context.type(), context.id()), Defaults.FIELD_TYPE); context.uid(uid); fields.add(uid); if (fieldType().hasDocValues()) { From d3c5f865be75577a96c4014c07d104ab0dcf60e3 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 6 May 2016 13:20:28 +0200 Subject: [PATCH 0103/1311] Exclude all but string fields from highlighting if wildcards are used in fieldname We should prevent highlighting if a field is anything but a text or keyword field. However, someone might implement a custom field type that has text and still want to highlight on that. We cannot know in advance if the highlighter will be able to highlight such a field and so we do the following: If the field is only highlighted because the field matches a wildcard we assume it was a mistake and do not process it. If the field was explicitly given we assume that whoever issued the query knew what they were doing and try to highlight anyway. closes #17537 --- .../search/highlight/HighlightPhase.java | 16 ++ .../externalvalues/ExternalMapperPlugin.java | 3 +- .../ExternalValuesMapperIntegrationIT.java | 50 +++++ .../externalvalues/FakeStringFieldMapper.java | 193 ++++++++++++++++++ .../search/highlight/HighlighterSearchIT.java | 30 ++- .../search/request/highlighting.asciidoc | 4 +- 6 files changed, 293 insertions(+), 3 deletions(-) create mode 100755 core/src/test/java/org/elasticsearch/index/mapper/externalvalues/FakeStringFieldMapper.java diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index b6f41135025..591aaab01d1 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -102,6 +104,20 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { continue; } + // We should prevent highlighting if a field is anything but a text or keyword field. + // However, someone might implement a custom field type that has text and still want to + // highlight on that. We cannot know in advance if the highlighter will be able to + // highlight such a field and so we do the following: + // If the field is only highlighted because the field matches a wildcard we assume + // it was a mistake and do not process it. + // If the field was explicitly given we assume that whoever issued the query knew + // what they were doing and try to highlight anyway. 
+ if (fieldNameContainsWildcards) { + if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && fieldMapper.fieldType() + .typeName().equals(StringFieldMapper.CONTENT_TYPE) == false) { + continue; + } + } String highlighterType = field.fieldOptions().highlighterType(); if (highlighterType == null) { for(String highlighterCandidate : STANDARD_HIGHLIGHTERS_BY_PRECEDENCE) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java index 863e0c25fb0..87daaa58769 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java @@ -43,6 +43,7 @@ public class ExternalMapperPlugin extends Plugin { indicesModule.registerMapper(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo")); indicesModule.registerMapper(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar")); indicesModule.registerMapper(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR")); + indicesModule.registerMapper(FakeStringFieldMapper.CONTENT_TYPE, new FakeStringFieldMapper.TypeParser()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 171245841a0..350cbc43f9a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -25,10 +25,12 @@ import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { @@ -37,6 +39,54 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { return pluginList(ExternalMapperPlugin.class); } + public void testHighlightingOnCustomString() throws Exception { + prepareCreate("test-idx").addMapping("type", + XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field").field("type", FakeStringFieldMapper.CONTENT_TYPE).endObject() + .endObject() + .endObject().endObject()).execute().get(); + ensureYellow("test-idx"); + + index("test-idx", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "Every day is exactly the same") + .endObject()); + refresh(); + + SearchResponse response; + // test if the highlighting is excluded when we use wildcards + response = client().prepareSearch("test-idx") + .setQuery(QueryBuilders.matchQuery("field", "exactly the same")) + .highlighter(new HighlightBuilder().field("*")) + .execute().actionGet(); + assertSearchResponse(response); + assertThat(response.getHits().getTotalHits(), equalTo(1L)); + assertThat(response.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); + + // make 
sure it is not excluded when we explicitly provide the fieldname + response = client().prepareSearch("test-idx") + .setQuery(QueryBuilders.matchQuery("field", "exactly the same")) + .highlighter(new HighlightBuilder().field("field")) + .execute().actionGet(); + assertSearchResponse(response); + assertThat(response.getHits().getTotalHits(), equalTo(1L)); + assertThat(response.getHits().getAt(0).getHighlightFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getHighlightFields().get("field").fragments()[0].string(), equalTo("Every day is " + + "exactly the same")); + + // make sure it is not excluded when we explicitly provide the fieldname and a wildcard + response = client().prepareSearch("test-idx") + .setQuery(QueryBuilders.matchQuery("field", "exactly the same")) + .highlighter(new HighlightBuilder().field("*").field("field")) + .execute().actionGet(); + assertSearchResponse(response); + assertThat(response.getHits().getTotalHits(), equalTo(1L)); + assertThat(response.getHits().getAt(0).getHighlightFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getHighlightFields().get("field").fragments()[0].string(), equalTo("Every day is " + + "exactly the same")); + } + public void testExternalValues() throws Exception { prepareCreate("test-idx").addMapping("type", XContentFactory.jsonBuilder().startObject().startObject("type") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/FakeStringFieldMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/FakeStringFieldMapper.java new file mode 100755 index 00000000000..e0c1243f82f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/FakeStringFieldMapper.java @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.externalvalues; + +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; + +// Like a String mapper but with very few options. We just use it to test if highlighting on a custom string mapped field works as expected. +public class FakeStringFieldMapper extends FieldMapper { + + public static final String CONTENT_TYPE = "fake_string"; + + public static class Defaults { + + public static final MappedFieldType FIELD_TYPE = new FakeStringFieldType(); + + static { + FIELD_TYPE.freeze(); + } + } + + public static class Builder extends FieldMapper.Builder { + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); + builder = this; + } + + @Override + public FakeStringFieldType fieldType() { + return (FakeStringFieldType) super.fieldType(); + } + + @Override + public FakeStringFieldMapper build(BuilderContext context) { + setupFieldType(context); + return new FakeStringFieldMapper( + name, fieldType(), defaultFieldType, + context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + } + } + + public static class TypeParser implements Mapper.TypeParser { + + public TypeParser() { + } + + @Override + public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { + FakeStringFieldMapper.Builder builder = new FakeStringFieldMapper.Builder(fieldName); + parseTextField(builder, fieldName, node, parserContext); + return builder; + } + } + + public static final class FakeStringFieldType extends MappedFieldType { + + + public FakeStringFieldType() { + } + + protected FakeStringFieldType(FakeStringFieldType ref) { + super(ref); + } + + public FakeStringFieldType clone() { + return new FakeStringFieldType(this); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public Query nullValueQuery() { + if (nullValue() == null) { + return null; + } + return termQuery(nullValue(), null); + } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, + @Nullable QueryShardContext context) { + RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } + } + + protected FakeStringFieldMapper(String simpleName, FakeStringFieldType fieldType, MappedFieldType 
defaultFieldType,
+                                    Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
+        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
+    }
+
+    @Override
+    protected StringFieldMapper clone() {
+        return (StringFieldMapper) super.clone();
+    }
+
+    @Override
+    protected boolean customBoost() {
+        return true;
+    }
+
+    @Override
+    protected void parseCreateField(ParseContext context, List fields) throws IOException {
+        StringFieldMapper.ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().boost());
+        if (valueAndBoost.value() == null) {
+            return;
+        }
+        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
+            Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType());
+            fields.add(field);
+        }
+        if (fieldType().hasDocValues()) {
+            fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value())));
+        }
+    }
+
+    public static StringFieldMapper.ValueAndBoost parseCreateFieldForString(ParseContext context, float defaultBoost) throws IOException {
+        if (context.externalValueSet()) {
+            return new StringFieldMapper.ValueAndBoost(context.externalValue().toString(), defaultBoost);
+        }
+        XContentParser parser = context.parser();
+        return new StringFieldMapper.ValueAndBoost(parser.textOrNull(), defaultBoost);
+    }
+
+    @Override
+    protected String contentType() {
+        return CONTENT_TYPE;
+    }
+
+    @Override
+    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
+        super.doMerge(mergeWith, updateAllTypes);
+    }
+
+    @Override
+    public FakeStringFieldType fieldType() {
+        return (FakeStringFieldType) super.fieldType();
+    }
+
+    @Override
+    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
+        super.doXContentBody(builder, includeDefaults, params);
+        doXContentAnalyzers(builder, includeDefaults);
+    }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
index b56588e1759..95be99ac3b6 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.highlight;

 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -2542,4 +2541,33 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         response = search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1/boost)).get();
         assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
     }
+
+    public void testGeoFieldHighlighting() throws IOException {
+        // check that we do not get an exception for geo_point fields in case someone tries to highlight
+        // them accidentally with a wildcard
+        // see https://github.com/elastic/elasticsearch/issues/17537
+        XContentBuilder mappings = jsonBuilder();
+        mappings.startObject();
+        mappings.startObject("type")
+            .startObject("properties")
+            .startObject("geo_point")
+            .field("type", "geo_point")
+            .endObject()
+            .endObject()
+            .endObject();
+        mappings.endObject();
+        assertAcked(prepareCreate("test")
+            .addMapping("type", mappings));
+        ensureYellow();
+
+        client().prepareIndex("test", "type", "1")
.setSource(jsonBuilder().startObject().field("geo_point", "60.12,100.34").endObject()) + .get(); + refresh(); + SearchResponse search = client().prepareSearch().setSource( + new SearchSourceBuilder().query(QueryBuilders.geoBoundingBoxQuery("geo_point").setCorners(61.10078883158897, -170.15625, + -64.92354174306496, 118.47656249999999)).highlighter(new HighlightBuilder().field("*"))).get(); + assertNoFailures(search); + assertThat(search.getHits().totalHits(), equalTo(1L)); + } } diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index bce9f13ab26..bfb5ffae7b3 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -35,7 +35,9 @@ be used for highlighting if it mapped to have `store` set to `true`. ================================== The field name supports wildcard notation. For example, using `comment_*` -will cause all fields that match the expression to be highlighted. +will cause all <> or <> fields that match the expression to be highlighted. +Note that all other fields will not be highlighted. If you use a custom mapper and want to +highlight on a field anyway, you have to provide the field name explicitly. [[plain-highlighter]] ==== Plain highlighter From 7b69e4ef43cbe0a2607df95dbd5924391c390856 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 6 May 2016 14:29:19 +0200 Subject: [PATCH 0104/1311] keyword fields should also be highlighted --- .../search/highlight/HighlightPhase.java | 5 ++-- .../search/highlight/HighlighterSearchIT.java | 30 +++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index 591aaab01d1..1c4a9edca2a 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; @@ -113,8 +114,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { // If the field was explicitly given we assume that whoever issued the query knew // what they were doing and try to highlight anyway. 
if (fieldNameContainsWildcards) {
-                if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && fieldMapper.fieldType()
-                    .typeName().equals(StringFieldMapper.CONTENT_TYPE) == false) {
+                if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && fieldMapper.fieldType().typeName().equals
+                    (KeywordFieldMapper.CONTENT_TYPE) == false) {
                     continue;
                 }
             }
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
index 95be99ac3b6..8b80c7657cf 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
@@ -2570,4 +2570,34 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         assertNoFailures(search);
         assertThat(search.getHits().totalHits(), equalTo(1L));
     }
+
+    public void testKeywordFieldHighlighting() throws IOException {
+        // check that we do not get an exception for keyword fields in case someone tries to highlight
+        // them accidentally with a wildcard
+        // see https://github.com/elastic/elasticsearch/issues/17537
+        XContentBuilder mappings = jsonBuilder();
+        mappings.startObject();
+        mappings.startObject("type")
+            .startObject("properties")
+            .startObject("keyword_field")
+            .field("type", "keyword")
+            .endObject()
+            .endObject()
+            .endObject();
+        mappings.endObject();
+        assertAcked(prepareCreate("test")
+            .addMapping("type", mappings));
+        ensureYellow();
+
+        client().prepareIndex("test", "type", "1")
+            .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject())
+            .get();
+        refresh();
+        SearchResponse search = client().prepareSearch().setSource(
+            new SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text")).highlighter(new HighlightBuilder().field("*")))
+            .get();
+        assertNoFailures(search);
+        assertThat(search.getHits().totalHits(), equalTo(1L));
+        assertThat(search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(), equalTo("some text"));
+    }
 }

From b12a42351ebe6f5b90d4a22110e4c6e2d8608378 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Fri, 6 May 2016 15:00:41 +0200
Subject: [PATCH 0105/1311] Pipeline Stats: Fix concurrent modification exception (#18177)

Because the stats map was structurally modified while its key set was being iterated, a ConcurrentModificationException could be thrown from the pipeline stats. This change removes entries through an explicit Iterator instead; a minimal reproduction of the failure and the fix follows.
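A self-contained sketch of the failure mode and the fix, with hypothetical pipeline names — an illustration, not the service code in the diff below:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    public class ConcurrentModificationDemo {
        public static void main(String[] args) {
            Map<String, Integer> stats = new HashMap<>();
            stats.put("pipeline-a", 1);
            stats.put("pipeline-b", 2);
            Set<String> live = Collections.singleton("pipeline-b");

            // Buggy pattern: removing through the map while iterating its key
            // set typically throws ConcurrentModificationException on the next
            // iterator step, because the map's modCount no longer matches.
            // for (String name : stats.keySet()) {
            //     if (live.contains(name) == false) {
            //         stats.remove(name);
            //     }
            // }

            // Fixed pattern: Iterator.remove() keeps the iterator's expected
            // modCount in sync with the map's, so removal is safe.
            Iterator<String> it = stats.keySet().iterator();
            while (it.hasNext()) {
                if (live.contains(it.next()) == false) {
                    it.remove();
                }
            }
            System.out.println(stats); // prints {pipeline-b=2}
        }
    }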
Closes #18126 --- .../ingest/PipelineExecutionService.java | 7 ++++-- .../ingest/PipelineExecutionServiceTests.java | 22 +++++++++++++++++-- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 94c79db30a0..8674e805974 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -35,6 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; @@ -124,9 +125,11 @@ public class PipelineExecutionService implements ClusterStateListener { void updatePipelineStats(IngestMetadata ingestMetadata) { boolean changed = false; Map newStatsPerPipeline = new HashMap<>(statsHolderPerPipeline); - for (String pipeline : newStatsPerPipeline.keySet()) { + Iterator iterator = newStatsPerPipeline.keySet().iterator(); + while (iterator.hasNext()) { + String pipeline = iterator.next(); if (ingestMetadata.getPipelines().containsKey(pipeline) == false) { - newStatsPerPipeline.remove(pipeline); + iterator.remove(); changed = true; } } diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index b84ba928be4..3c0de328c8c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -46,11 +46,13 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.eq; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.argThat; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -380,6 +382,22 @@ public class PipelineExecutionServiceTests extends ESTestCase { assertThat(ingestStats.getTotalStats().getIngestCount(), equalTo(2L)); } + // issue: https://github.com/elastic/elasticsearch/issues/18126 + public void testUpdatingStatsWhenRemovingPipelineWorks() throws Exception { + Map configurationMap = new HashMap<>(); + configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"))); + configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"))); + executionService.updatePipelineStats(new IngestMetadata(configurationMap)); + assertThat(executionService.stats().getStatsPerPipeline(), hasKey("_id1")); + assertThat(executionService.stats().getStatsPerPipeline(), hasKey("_id2")); + + configurationMap = new HashMap<>(); + configurationMap.put("_id3", new PipelineConfiguration("_id3", new BytesArray("{}"))); + executionService.updatePipelineStats(new IngestMetadata(configurationMap)); + assertThat(executionService.stats().getStatsPerPipeline(), not(hasKey("_id1"))); + assertThat(executionService.stats().getStatsPerPipeline(), not(hasKey("_id2"))); + } + private 
IngestDocument eqID(String index, String type, String id, Map<String, Object> source) { return argThat(new IngestDocumentMatcher(index, type, id, source)); }
From b6698c3145b78c5f996ea52432abd12447866218 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 5 May 2016 18:07:54 -0400 Subject: [PATCH 0106/1311] Random script fields can't overlap
Overlapping script fields cause round tripping through xcontent to fail. Closes #18166
--- .../elasticsearch/index/query/InnerHitBuilderTests.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 676671f066e..d4ba6ca9062 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -86,7 +86,6 @@ public class InnerHitBuilderTests extends ESTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18166") public void testFromAndToXContent() throws Exception { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { InnerHitBuilder innerHit = randomInnerHits(true, false); @@ -221,7 +220,12 @@ public class InnerHitBuilderTests extends ESTestCase { innerHits.setTrackScores(randomBoolean()); innerHits.setFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); innerHits.setFieldDataFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); - innerHits.setScriptFields(new HashSet<>(randomListStuff(16, InnerHitBuilderTests::randomScript))); + // Random script fields deduped on their field name. + Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>(); + for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) { + scriptFields.put(field.fieldName(), field); + } + innerHits.setScriptFields(new HashSet<>(scriptFields.values())); FetchSourceContext randomFetchSourceContext; if (randomBoolean()) { randomFetchSourceContext = new FetchSourceContext(randomBoolean());
From e839dad978ed66ecec2876432fb5688df83d8aa5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 5 May 2016 20:00:22 +0200 Subject: [PATCH 0107/1311] Remove unused checkstyle_suppressions line length checks
The suppressions removed cover files that are now below the 140 character line length limit.
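Stepping back to PATCH 0106 above: keying the map by field name guarantees at most one script field per name, with the last generated field winning. A toy, self-contained version of that dedup follows; the class and the values are invented for illustration and are not part of the patch:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class ScriptFieldDedupSketch {
        static final class ScriptField {
            final String fieldName;
            final String script;
            ScriptField(String fieldName, String script) { this.fieldName = fieldName; this.script = script; }
            String fieldName() { return fieldName; }
        }

        public static void main(String[] args) {
            List<ScriptField> generated = Arrays.asList(
                    new ScriptField("a", "1 + 1"),
                    new ScriptField("b", "2 + 2"),
                    new ScriptField("a", "3 + 3")); // duplicate name

            // Keyed by field name, so the later "a" replaces the earlier one.
            Map<String, ScriptField> byName = new HashMap<>();
            for (ScriptField field : generated) {
                byName.put(field.fieldName(), field);
            }
            Set<ScriptField> deduped = new HashSet<>(byName.values());

            System.out.println(deduped.size()); // prints 2
        }
    }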
--- .../resources/checkstyle_suppressions.xml | 91 ------------------- 1 file changed, 91 deletions(-)
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 0473f58cf73..bebe07612b0 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
[hunk bodies elided: the 91 removed <suppress .../> entries were stripped from this copy of the patch, leaving only empty hunk headers; the affected file list is not recoverable here]
From e90d00ffce2eb21469f8a931728ba776e72cc804 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 6 May 2016 09:17:18 -0400 Subject: [PATCH 0108/1311] Remove handshake from transport client
This commit removes handshaking from the transport client. This handshaking is not needed because of the existence of the liveness check. Relates #18174
--- .../TransportClientNodesService.java | 4 +-- .../zen/ping/unicast/UnicastZenPing.java | 2 +- .../transport/TransportService.java | 27 +++++++++++++++---- .../transport/FailAndRetryMockTransport.java | 6 +---- .../AbstractSimpleTransportTestCase.java | 5 ++-- .../NettyTransportServiceHandshakeTests.java | 8 +++--- 6 files changed, 32 insertions(+), 20 deletions(-)
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 68ed7c927ac..c3379c9ceaf 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -360,7 +360,7 @@ public class TransportClientNodesService extends AbstractComponent { try { // its a listed node, light connect to it... logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNodeLight(listedNode, pingTimeout, !ignoreClusterName); + transportService.connectToNodeLight(listedNode); } catch (Throwable e) { logger.debug("failed to connect to node [{}], removed from nodes list", e, listedNode); newFilteredNodes.add(listedNode); @@ -435,7 +435,7 @@ public class TransportClientNodesService extends AbstractComponent { } else { // its a listed node, light connect to it...
logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNodeLight(listedNode, pingTimeout, !ignoreClusterName); + transportService.connectToNodeLight(listedNode); } } catch (Exception e) { logger.debug("failed to connect to node [{}], ignoring...", e, listedNode); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index cc37504360c..d200ca5b07b 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -402,7 +402,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen // connect to the node, see if we manage to do it, if not, bail if (!nodeFoundByAddress) { logger.trace("[{}] connecting (light) to {}", sendPingsHandler.id(), finalNodeToSend); - transportService.connectToNodeLight(finalNodeToSend, timeout.getMillis()); + transportService.connectToNodeLightAndHandshake(finalNodeToSend, timeout.getMillis()); } else { logger.trace("[{}] connecting to {}", sendPingsHandler.id(), finalNodeToSend); transportService.connectToNode(finalNodeToSend); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 89cc68debfd..820cdfc130a 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -277,6 +277,18 @@ public class TransportService extends AbstractLifecycleComponent imp //we make sure that nodes get added to the connected ones when calling addTransportAddress, by returning proper nodes info if (connectMode) { TransportResponseHandler transportResponseHandler = transportServiceAdapter.onResponseReceived(requestId); - if (action.equals(TransportLivenessAction.NAME)) { - transportResponseHandler.handleResponse(new LivenessResponse(clusterName, node)); - } else { - transportResponseHandler.handleResponse(new TransportService.HandshakeResponse(node, clusterName, Version.CURRENT)); - } + transportResponseHandler.handleResponse(new LivenessResponse(ClusterName.DEFAULT, node)); return; } diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 45966606d31..f285a1db52e 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; @@ -1179,7 +1178,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try { - serviceB.connectToNodeLight(nodeA, 100); + serviceB.connectToNodeLightAndHandshake(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well @@ -1239,7 +1238,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try 
{ - serviceB.connectToNodeLight(nodeA, 100); + serviceB.connectToNodeLightAndHandshake(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well diff --git a/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java index b376a55af73..2b0ff6e0c1a 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java @@ -109,7 +109,7 @@ public class NettyTransportServiceHandshakeTests extends ESTestCase { test); DiscoveryNode connectedNode = - handleA.transportService.connectToNodeLight( + handleA.transportService.connectToNodeLightAndHandshake( new DiscoveryNode( "", handleB.discoveryNode.getAddress(), @@ -131,7 +131,7 @@ public class NettyTransportServiceHandshakeTests extends ESTestCase { NetworkHandle handleB = startServices("TS_B", settings, Version.CURRENT, new ClusterName("b")); try { - handleA.transportService.connectToNodeLight( + handleA.transportService.connectToNodeLightAndHandshake( new DiscoveryNode( "", handleB.discoveryNode.getAddress(), @@ -154,7 +154,7 @@ public class NettyTransportServiceHandshakeTests extends ESTestCase { startServices("TS_B", settings, VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()), test); try { - handleA.transportService.connectToNodeLight( + handleA.transportService.connectToNodeLightAndHandshake( new DiscoveryNode( "", handleB.discoveryNode.getAddress(), @@ -180,7 +180,7 @@ public class NettyTransportServiceHandshakeTests extends ESTestCase { new ClusterName("b") ); - DiscoveryNode connectedNode = handleA.transportService.connectToNodeLight( + DiscoveryNode connectedNode = handleA.transportService.connectToNodeLightAndHandshake( new DiscoveryNode( "", handleB.discoveryNode.getAddress(), From cb40b986d15317e7afdc2872eccf25b6d0f05fef Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 6 May 2016 09:09:06 -0400 Subject: [PATCH 0109/1311] Allow leading `/` in AUTOSENSE path Relates to #18160 --- .../elasticsearch/gradle/RestTestsFromSnippetsTask.groovy | 6 ++---- docs/reference/mapping/params/analyzer.asciidoc | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy index c4201a01f26..1ce8adf1ba4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/RestTestsFromSnippetsTask.groovy @@ -176,10 +176,8 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { String body = matcher.group("body") String catchPart = last ? snippet.catchPart : null if (pathAndQuery.startsWith('/')) { - // Why not do some light linting while we're here? 
- throw new InvalidUserDataException( - "Path shouldn't start with a '/': $snippet\n" - + snippet.contents) + // Leading '/'s break the generated paths + pathAndQuery = pathAndQuery.substring(1) } emitDo(method, pathAndQuery, body, catchPart) } diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc index 89d83f2b369..b2138794546 100644 --- a/docs/reference/mapping/params/analyzer.asciidoc +++ b/docs/reference/mapping/params/analyzer.asciidoc @@ -41,7 +41,7 @@ in the field mapping, as follows: [source,js] -------------------------------------------------- -PUT my_index +PUT /my_index { "mappings": { "my_type": { From d52537dc7bcb6331dd18b920f04d661cf688a66f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 6 May 2016 09:54:26 -0400 Subject: [PATCH 0110/1311] Add semicolon query string parameter delimiter This commit adds support for the semicolon character as a valid query string parameter delimiter. Relates #18186 --- .../elasticsearch/rest/support/RestUtils.java | 2 +- .../rest/util/RestUtilsTests.java | 28 +++++++++++++------ 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java b/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java index 7a976fe1f1d..ef7a7ac0edd 100644 --- a/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java +++ b/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java @@ -73,7 +73,7 @@ public class RestUtils { name = decodeComponent(s.substring(pos, i)); } pos = i + 1; - } else if (c == '&') { + } else if (c == '&' || c == ';') { if (name == null && pos != i) { // We haven't seen an `=' so far but moved forward. // Must be a param of the form '&a&' so add it with diff --git a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java index 065b99ea5ae..a2f9e42fc3c 100644 --- a/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/rest/util/RestUtilsTests.java @@ -36,6 +36,10 @@ import static org.hamcrest.Matchers.nullValue; */ public class RestUtilsTests extends ESTestCase { + static char randomDelimiter() { + return randomBoolean() ? 
'&' : ';'; + } + public void testDecodeQueryString() { Map params = new HashMap<>(); @@ -45,7 +49,7 @@ public class RestUtilsTests extends ESTestCase { assertThat(params.get("test"), equalTo("value")); params.clear(); - uri = "something?test=value&test1=value1"; + uri = String.format(Locale.ROOT, "something?test=value%ctest1=value1", randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(2)); assertThat(params.get("test"), equalTo("value")); @@ -70,12 +74,12 @@ public class RestUtilsTests extends ESTestCase { assertThat(params.size(), equalTo(0)); params.clear(); - uri = "something?&"; + uri = String.format(Locale.ROOT, "something?%c", randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(0)); params.clear(); - uri = "something?p=v&&p1=v1"; + uri = String.format(Locale.ROOT, "something?p=v%c%cp1=v1", randomDelimiter(), randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(2)); assertThat(params.get("p"), equalTo("v")); @@ -87,7 +91,7 @@ public class RestUtilsTests extends ESTestCase { assertThat(params.size(), equalTo(0)); params.clear(); - uri = "something?&="; + uri = String.format(Locale.ROOT, "something?%c=", randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(0)); @@ -98,14 +102,14 @@ public class RestUtilsTests extends ESTestCase { assertThat(params.get("a"), equalTo("")); params.clear(); - uri = "something?p=v&a"; + uri = String.format(Locale.ROOT, "something?p=v%ca", randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(2)); assertThat(params.get("a"), equalTo("")); assertThat(params.get("p"), equalTo("v")); params.clear(); - uri = "something?p=v&a&p1=v1"; + uri = String.format(Locale.ROOT, "something?p=v%ca%cp1=v1", randomDelimiter(), randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(3)); assertThat(params.get("a"), equalTo("")); @@ -113,7 +117,7 @@ public class RestUtilsTests extends ESTestCase { assertThat(params.get("p1"), equalTo("v1")); params.clear(); - uri = "something?p=v&a&b&p1=v1"; + uri = String.format(Locale.ROOT, "something?p=v%ca%cb%cp1=v1", randomDelimiter(), randomDelimiter(), randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); assertThat(params.size(), equalTo(4)); assertThat(params.get("a"), equalTo("")); @@ -139,9 +143,15 @@ public class RestUtilsTests extends ESTestCase { Map params = new HashMap<>(); // This is a valid URL - String uri = "example.com/:@-._~!$&'()*+,=;:@-._~!$&'()*+,=:@-._~!$&'()*+,==?/?:@-._~!$'()*+,;=/?:@-._~!$'()*+,;==#/?:@-._~!$&'()*+,;="; + String uri = String.format( + Locale.ROOT, + "example.com/:@-._~!$%c'()*+,=;:@-._~!$%c'()*+,=:@-._~!$%c'()*+,==?/?:@-._~!$'()*+,=/?:@-._~!$'()*+,==#/?:@-._~!$%c'()*+,;=", + randomDelimiter(), + randomDelimiter(), + randomDelimiter(), + randomDelimiter()); RestUtils.decodeQueryString(uri, uri.indexOf('?') + 1, params); - assertThat(params.get("/?:@-._~!$'()* ,;"), equalTo("/?:@-._~!$'()* ,;==")); + assertThat(params.get("/?:@-._~!$'()* ,"), equalTo("/?:@-._~!$'()* ,==")); assertThat(params.size(), equalTo(1)); } From 473be0137398f18d40660622fcfaf77756dd93eb Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Fri, 6 May 2016 16:05:19 +0200 Subject: [PATCH 0111/1311] Documentation: Switch 
to https for debian repository
--- docs/reference/setup/install/deb.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index b327dc4a85f..9c7a043bdd5 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -33,7 +33,7 @@ Save the repository definition to +/etc/apt/sources.list.d/elasticsearch-{major ["source","sh",subs="attributes,callouts"] -------------------------------------------------- -echo "deb http://packages.elastic.co/elasticsearch/{major-version}/debian stable main" | sudo tee -a /etc/apt/sources.list.d/elasticsearch-{major-version}.list +echo "deb https://packages.elastic.co/elasticsearch/{major-version}/debian stable main" | sudo tee -a /etc/apt/sources.list.d/elasticsearch-{major-version}.list -------------------------------------------------- [WARNING] @@ -63,7 +63,7 @@ If two entries exist for the same Elasticsearch repository, you will see an erro ["literal",subs="attributes,callouts"] -Duplicate sources.list entry http://packages.elastic.co/elasticsearch/{major-version}/debian/ ...` +Duplicate sources.list entry https://packages.elastic.co/elasticsearch/{major-version}/debian/ ...` Examine +/etc/apt/sources.list.d/elasticsearch-{major-version}.list+ for the duplicate entry or locate the duplicate entry amongst the files in `/etc/apt/sources.list.d/` and the `/etc/apt/sources.list` file. ================================================== @@ -188,4 +188,4 @@ locations for a Debian-based system: |======================================================================= -include::next-steps.asciidoc[] \ No newline at end of file +include::next-steps.asciidoc[]
From f270ed26c36a381a32778444ec83e797bfda504f Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 6 May 2016 16:26:43 +0200 Subject: [PATCH 0112/1311] fix highlighting for old version indices with string fields
--- .../search/highlight/HighlightPhase.java | 5 ++- .../search/highlight/HighlighterSearchIT.java | 44 +++++++++++++++++-- 2 files changed, 44 insertions(+), 5 deletions(-)
diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index 1c4a9edca2a..92011fd77e7 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -114,8 +114,9 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { // If the field was explicitly given we assume that whoever issued the query knew // what they were doing and try to highlight anyway.
if (fieldNameContainsWildcards) { - if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && fieldMapper.fieldType().typeName().equals - (KeywordFieldMapper.CONTENT_TYPE) == false) { + if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && + fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false && + fieldMapper.fieldType().typeName().equals(StringFieldMapper.CONTENT_TYPE) == false) { continue; } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 8b80c7657cf..869182cb51a 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -19,9 +19,11 @@ package org.elasticsearch.search.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,15 +37,18 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.search.MatchQuery.Type; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import java.io.IOException; +import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -84,6 +89,12 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; public class HighlighterSearchIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testHighlightingWithWildcardName() throws IOException { // test the kibana case with * as fieldname that will try highlight all fields including meta fields XContentBuilder mappings = jsonBuilder(); @@ -2572,9 +2583,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { } public void testKeywordFieldHighlighting() throws IOException { - // check that we do not get an exception for geo_point fields in case someone tries to highlight - // it accidential with a wildcard - // see https://github.com/elastic/elasticsearch/issues/17537 + // check that keyword highlighting works XContentBuilder mappings = jsonBuilder(); mappings.startObject(); mappings.startObject("type") @@ -2600,4 +2609,33 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(search.getHits().totalHits(), equalTo(1L)); assertThat(search.getHits().getAt(0).getHighlightFields().get("keyword_field").getFragments()[0].string(), equalTo("some text")); } + + public void testStringFieldHighlighting() throws IOException { + // check that string field highlighting on old indexes 
works + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("string_field") + .field("type", "string") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings) + .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_2))); + ensureYellow(); + + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("string_field", "some text").endObject()) + .get(); + refresh(); + SearchResponse search = client().prepareSearch().setSource( + new SearchSourceBuilder().query(QueryBuilders.matchQuery("string_field", "some text")).highlighter(new HighlightBuilder().field("*"))) + .get(); + assertNoFailures(search); + assertThat(search.getHits().totalHits(), equalTo(1L)); + assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), equalTo("some text")); + } } From ddebbb9536fadf5f08b99073f2ce551f31f44140 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Fri, 6 May 2016 16:50:34 +0200 Subject: [PATCH 0113/1311] add string to documentation --- docs/reference/search/request/highlighting.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index bfb5ffae7b3..53d6e23fa80 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -35,7 +35,8 @@ be used for highlighting if it mapped to have `store` set to `true`. ================================== The field name supports wildcard notation. For example, using `comment_*` -will cause all <> or <> fields that match the expression to be highlighted. +will cause all <> and <> fields (and <> +from versions before 5.0) that match the expression to be highlighted. Note that all other fields will not be highlighted. If you use a custom mapper and want to highlight on a field anyway, you have to provide the field name explicitly. 
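A usage sketch of the wildcard field pattern the docs change above describes, modeled on the integration tests earlier in this series; the index and field names are invented and this is illustrative rather than part of the patch:

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.elasticsearch.search.highlight.HighlightBuilder;

    public class WildcardHighlightSketch {
        // Highlights every field whose name matches "comment_*"; with the change
        // above, only text and keyword fields (plus string fields on pre-5.0
        // indices) are considered, so a stray geo_point match is skipped instead
        // of failing the request.
        static SearchResponse search(Client client) {
            return client.prepareSearch("test")
                    .setSource(new SearchSourceBuilder()
                            .query(QueryBuilders.matchQuery("comment_text", "some text"))
                            .highlighter(new HighlightBuilder().field("comment_*")))
                    .get();
        }
    }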
From 0ff5652fffb56853fb98a63dda8d63160695c34e Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 6 May 2016 16:59:53 +0200 Subject: [PATCH 0114/1311] Add node name to Cat Recovery closes #8041 --- .../rest/action/cat/RestRecoveryAction.java | 4 +++ docs/reference/cat/recovery.asciidoc | 12 +++---- .../test/cat.recovery/10_basic.yaml | 33 ++++++++++++++++++- 3 files changed, 42 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java index 7c555c9b357..4764462d958 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java @@ -88,7 +88,9 @@ public class RestRecoveryAction extends AbstractCatAction { .addCell("type", "alias:ty;desc:recovery type") .addCell("stage", "alias:st;desc:recovery stage") .addCell("source_host", "alias:shost;desc:source host") + .addCell("source_node", "alias:snode;desc:source node name") .addCell("target_host", "alias:thost;desc:target host") + .addCell("target_node", "alias:tnode;desc:target node name") .addCell("repository", "alias:rep;desc:repository") .addCell("snapshot", "alias:snap;desc:snapshot") .addCell("files", "alias:f;desc:number of files to recover") @@ -149,7 +151,9 @@ public class RestRecoveryAction extends AbstractCatAction { t.addCell(state.getType().toString().toLowerCase(Locale.ROOT)); t.addCell(state.getStage().toString().toLowerCase(Locale.ROOT)); t.addCell(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getHostName()); + t.addCell(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getName()); t.addCell(state.getTargetNode().getHostName()); + t.addCell(state.getTargetNode().getName()); t.addCell(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getRepository()); t.addCell(state.getRestoreSource() == null ? 
"n/a" : state.getRestoreSource().snapshotId().getSnapshot()); t.addCell(state.getIndex().totalRecoverFiles()); diff --git a/docs/reference/cat/recovery.asciidoc b/docs/reference/cat/recovery.asciidoc index b9a16b2913d..6fe748096d1 100644 --- a/docs/reference/cat/recovery.asciidoc +++ b/docs/reference/cat/recovery.asciidoc @@ -15,12 +15,12 @@ are no shards in transit from one node to another: [source,sh] ---------------------------------------------------------------------------- > curl -XGET 'localhost:9200/_cat/recovery?v' -index shard time type stage source_host target_host repository snapshot files files_percent bytes bytes_percent total_files total_bytes translog translog_percent total_translog -index 0 87ms store done 127.0.0.1 127.0.0.1 n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 -index 1 97ms store done 127.0.0.1 127.0.0.1 n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 -index 2 93ms store done 127.0.0.1 127.0.0.1 n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 -index 3 90ms store done 127.0.0.1 127.0.0.1 n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 -index 4 9ms store done 127.0.0.1 127.0.0.1 n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 +index shard time type stage source_host source_node target_host target_node repository snapshot files files_percent bytes bytes_percent total_files total_bytes translog translog_percent total_translog +index 0 87ms store done 127.0.0.1 Athena 127.0.0.1 Athena n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 +index 1 97ms store done 127.0.0.1 Athena 127.0.0.1 Athena n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 +index 2 93ms store done 127.0.0.1 Athena 127.0.0.1 Athena n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 +index 3 90ms store done 127.0.0.1 Athena 127.0.0.1 Athena n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 +index 4 9ms store done 127.0.0.1 Athena 127.0.0.1 Athena n/a n/a 0 0.0% 0 0.0% 0 0 0 100.0% 0 --------------------------------------------------------------------------- In the above case, the source and target nodes are the same because the recovery diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index effc4c20313..fc596dd5792 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -19,7 +19,8 @@ cluster.health: wait_for_status: yellow - do: - cat.recovery: {} + cat.recovery: + h: i,s,t,ty,st,shost,thost,rep,snap,f,fr,fp,tf,b,br,bp,tb,to,tor,top - match: $body: | @@ -48,3 +49,33 @@ \n )+ $/ + + - do: + cat.recovery: + h: shard,source_node,bytes + + - match: + $body: | + /^ + ( + \d \s+ # shard + ((\S+\s?){1,10})\s+ # source_node + \d+ # bytes + \n + )+ + $/ + + - do: + cat.recovery: + h: shard,target_node,bytes + + - match: + $body: | + /^ + ( + \d \s+ # shard + ((\S+\s?){1,10})\s+ # target_node + \d+ # bytes + \n + )+ + $/ From 1d5b9f1ce65567b5d4ff195fe27090d0b72bfee1 Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Fri, 6 May 2016 13:17:43 -0400 Subject: [PATCH 0115/1311] Test was not updated with #18187 --- .../action/cat/RestRecoveryActionTests.java | 61 ++++++++++--------- 1 file changed, 33 insertions(+), 28 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index 4e8ea3b3eb0..978d226da40 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -127,26 +127,29 @@ public class RestRecoveryActionTests extends ESTestCase { assertNotNull(table); List headers = table.getHeaders(); + assertThat(headers.get(0).value, equalTo("index")); assertThat(headers.get(1).value, equalTo("shard")); assertThat(headers.get(2).value, equalTo("time")); assertThat(headers.get(3).value, equalTo("type")); assertThat(headers.get(4).value, equalTo("stage")); assertThat(headers.get(5).value, equalTo("source_host")); - assertThat(headers.get(6).value, equalTo("target_host")); - assertThat(headers.get(7).value, equalTo("repository")); - assertThat(headers.get(8).value, equalTo("snapshot")); - assertThat(headers.get(9).value, equalTo("files")); - assertThat(headers.get(10).value, equalTo("files_recovered")); - assertThat(headers.get(11).value, equalTo("files_percent")); - assertThat(headers.get(12).value, equalTo("files_total")); - assertThat(headers.get(13).value, equalTo("bytes")); - assertThat(headers.get(14).value, equalTo("bytes_recovered")); - assertThat(headers.get(15).value, equalTo("bytes_percent")); - assertThat(headers.get(16).value, equalTo("bytes_total")); - assertThat(headers.get(17).value, equalTo("translog_ops")); - assertThat(headers.get(18).value, equalTo("translog_ops_recovered")); - assertThat(headers.get(19).value, equalTo("translog_ops_percent")); + assertThat(headers.get(6).value, equalTo("source_node")); + assertThat(headers.get(7).value, equalTo("target_host")); + assertThat(headers.get(8).value, equalTo("target_node")); + assertThat(headers.get(9).value, equalTo("repository")); + assertThat(headers.get(10).value, equalTo("snapshot")); + assertThat(headers.get(11).value, equalTo("files")); + assertThat(headers.get(12).value, equalTo("files_recovered")); + assertThat(headers.get(13).value, equalTo("files_percent")); + assertThat(headers.get(14).value, equalTo("files_total")); + assertThat(headers.get(15).value, equalTo("bytes")); + assertThat(headers.get(16).value, equalTo("bytes_recovered")); + assertThat(headers.get(17).value, equalTo("bytes_percent")); + assertThat(headers.get(18).value, equalTo("bytes_total")); + assertThat(headers.get(19).value, equalTo("translog_ops")); + assertThat(headers.get(20).value, equalTo("translog_ops_recovered")); + assertThat(headers.get(21).value, equalTo("translog_ops_percent")); assertThat(table.getRows().size(), equalTo(successfulShards)); for (int i = 0; i < successfulShards; i++) { @@ -158,24 +161,26 @@ public class RestRecoveryActionTests extends ESTestCase { assertThat(cells.get(3).value, equalTo(state.getType().name().toLowerCase(Locale.ROOT))); assertThat(cells.get(4).value, equalTo(state.getStage().name().toLowerCase(Locale.ROOT))); assertThat(cells.get(5).value, equalTo(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getHostName())); - assertThat(cells.get(6).value, equalTo(state.getTargetNode().getHostName())); + assertThat(cells.get(6).value, equalTo(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getName())); + assertThat(cells.get(7).value, equalTo(state.getTargetNode().getHostName())); + assertThat(cells.get(8).value, equalTo(state.getTargetNode().getName())); assertThat( - cells.get(7).value, + cells.get(9).value, equalTo(state.getRestoreSource() == null ? "n/a" : state.getRestoreSource().snapshotId().getRepository())); assertThat( - cells.get(8).value, + cells.get(10).value, equalTo(state.getRestoreSource() == null ? 
"n/a" : state.getRestoreSource().snapshotId().getSnapshot())); - assertThat(cells.get(9).value, equalTo(state.getIndex().totalRecoverFiles())); - assertThat(cells.get(10).value, equalTo(state.getIndex().recoveredFileCount())); - assertThat(cells.get(11).value, equalTo(percent(state.getIndex().recoveredFilesPercent()))); - assertThat(cells.get(12).value, equalTo(state.getIndex().totalFileCount())); - assertThat(cells.get(13).value, equalTo(state.getIndex().totalRecoverBytes())); - assertThat(cells.get(14).value, equalTo(state.getIndex().recoveredBytes())); - assertThat(cells.get(15).value, equalTo(percent(state.getIndex().recoveredBytesPercent()))); - assertThat(cells.get(16).value, equalTo(state.getIndex().totalBytes())); - assertThat(cells.get(17).value, equalTo(state.getTranslog().totalOperations())); - assertThat(cells.get(18).value, equalTo(state.getTranslog().recoveredOperations())); - assertThat(cells.get(19).value, equalTo(percent(state.getTranslog().recoveredPercent()))); + assertThat(cells.get(11).value, equalTo(state.getIndex().totalRecoverFiles())); + assertThat(cells.get(12).value, equalTo(state.getIndex().recoveredFileCount())); + assertThat(cells.get(13).value, equalTo(percent(state.getIndex().recoveredFilesPercent()))); + assertThat(cells.get(14).value, equalTo(state.getIndex().totalFileCount())); + assertThat(cells.get(15).value, equalTo(state.getIndex().totalRecoverBytes())); + assertThat(cells.get(16).value, equalTo(state.getIndex().recoveredBytes())); + assertThat(cells.get(17).value, equalTo(percent(state.getIndex().recoveredBytesPercent()))); + assertThat(cells.get(18).value, equalTo(state.getIndex().totalBytes())); + assertThat(cells.get(19).value, equalTo(state.getTranslog().totalOperations())); + assertThat(cells.get(20).value, equalTo(state.getTranslog().recoveredOperations())); + assertThat(cells.get(21).value, equalTo(percent(state.getTranslog().recoveredPercent()))); } } From 0eaa831f481ff06a4bd1d201b10f9f9df1b98efd Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 6 May 2016 13:24:54 -0400 Subject: [PATCH 0116/1311] Preserve config files from RPM install This commit modifies the packaging for the RPM package so that edits to config files will not get lost during removal and upgrade. 
Relates #18188 --- distribution/build.gradle | 13 ++++-- .../src/main/packaging/scripts/postrm | 7 +++ distribution/src/main/packaging/scripts/prerm | 6 +++ .../packaging/scripts/40_rpm_package.bats | 45 ++++++++++++++++--- 4 files changed, 61 insertions(+), 10 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 09050db2159..d1d79d2db74 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -322,12 +322,13 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) { configurationFile '/etc/elasticsearch/elasticsearch.yml' configurationFile '/etc/elasticsearch/jvm.options' configurationFile '/etc/elasticsearch/logging.yml' - into('/etc') { - from "${packagingFiles}/etc" + into('/etc/elasticsearch') { fileMode 0750 permissionGroup 'elasticsearch' includeEmptyDirs true createDirectoryEntry true + fileType CONFIG | NOREPLACE + from "${packagingFiles}/etc/elasticsearch" } into('/usr/lib/tmpfiles.d') { @@ -335,21 +336,25 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) { } configurationFile '/usr/lib/systemd/system/elasticsearch.service' into('/usr/lib/systemd/system') { + fileType CONFIG | NOREPLACE from "${packagingFiles}/systemd/elasticsearch.service" } into('/usr/lib/sysctl.d') { + fileType CONFIG | NOREPLACE from "${packagingFiles}/systemd/sysctl/elasticsearch.conf" } configurationFile '/etc/init.d/elasticsearch' into('/etc/init.d') { - from "${packagingFiles}/init.d/elasticsearch" fileMode 0755 + fileType CONFIG | NOREPLACE + from "${packagingFiles}/init.d/elasticsearch" } configurationFile project.expansions['path.env'] into(new File(project.expansions['path.env']).getParent()) { - from "${project.packagingFiles}/env/elasticsearch" fileMode 0644 dirMode 0755 + fileType CONFIG | NOREPLACE + from "${project.packagingFiles}/env/elasticsearch" } /** diff --git a/distribution/src/main/packaging/scripts/postrm b/distribution/src/main/packaging/scripts/postrm index d4104845249..2fa42678d96 100644 --- a/distribution/src/main/packaging/scripts/postrm +++ b/distribution/src/main/packaging/scripts/postrm @@ -55,6 +55,7 @@ LOG_DIR="/var/log/elasticsearch" PLUGINS_DIR="/usr/share/elasticsearch/plugins" PID_DIR="/var/run/elasticsearch" DATA_DIR="/var/lib/elasticsearch" +CONF_DIR="/etc/elasticsearch" # Source the default env file if [ "$SOURCE_ENV_FILE" = "true" ]; then @@ -102,6 +103,12 @@ if [ "$REMOVE_DIRS" = "true" ]; then if [ -d "$DATA_DIR" ]; then rmdir --ignore-fail-on-non-empty "$DATA_DIR" fi + + # delete the conf directory if and only if empty + if [ -d "$CONF_DIR" ]; then + rmdir --ignore-fail-on-non-empty "$CONF_DIR" + fi + fi if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then diff --git a/distribution/src/main/packaging/scripts/prerm b/distribution/src/main/packaging/scripts/prerm index 07f39759aee..79523629f77 100644 --- a/distribution/src/main/packaging/scripts/prerm +++ b/distribution/src/main/packaging/scripts/prerm @@ -64,4 +64,10 @@ if [ "$STOP_REQUIRED" = "true" ]; then echo " OK" fi +SCRIPTS_DIR="/etc/elasticsearch/scripts" +# delete the scripts directory if and only if empty +if [ -d "$SCRIPTS_DIR" ]; then + rmdir --ignore-fail-on-non-empty "$SCRIPTS_DIR" +fi + ${scripts.footer} diff --git a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats index 064439cdd5a..0d3170cafeb 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats +++ 
b/qa/vagrant/src/test/resources/packaging/scripts/40_rpm_package.bats @@ -116,7 +116,7 @@ setup() { assert_file_not_exist "/etc/elasticsearch" assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_not_exist "/etc/elasticsearch/jvm.options" + assert_file_not_exist "/etc/elasticsearch/jvm.options" assert_file_not_exist "/etc/elasticsearch/logging.yml" assert_file_not_exist "/etc/init.d/elasticsearch" @@ -125,7 +125,6 @@ setup() { assert_file_not_exist "/etc/sysconfig/elasticsearch" } - @test "[RPM] reinstall package" { rpm -i elasticsearch-$(cat version).rpm } @@ -134,12 +133,46 @@ setup() { rpm -qe 'elasticsearch' } -@test "[RPM] verify package reinstallation" { - verify_package_installation +@test "[RPM] reremove package" { + echo "# ping" >> "/etc/elasticsearch/elasticsearch.yml" + echo "# ping" >> "/etc/elasticsearch/jvm.options" + echo "# ping" >> "/etc/elasticsearch/logging.yml" + echo "# ping" >> "/etc/elasticsearch/scripts/script" + rpm -e 'elasticsearch' } -@test "[RPM] reremove package" { - rpm -e 'elasticsearch' +@test "[RPM] verify preservation" { + # The removal must disable the service + # see prerm file + if is_systemd; then + run systemctl is-enabled elasticsearch.service + [ "$status" -eq 1 ] + fi + + # Those directories are deleted when removing the package + # see postrm file + assert_file_not_exist "/var/log/elasticsearch" + assert_file_not_exist "/usr/share/elasticsearch/plugins" + assert_file_not_exist "/var/run/elasticsearch" + + assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" + assert_file_exist "/etc/elasticsearch/elasticsearch.yml.rpmsave" + assert_file_not_exist "/etc/elasticsearch/jvm.options" + assert_file_exist "/etc/elasticsearch/jvm.options.rpmsave" + assert_file_not_exist "/etc/elasticsearch/logging.yml" + assert_file_exist "/etc/elasticsearch/logging.yml.rpmsave" + assert_file_exist "/etc/elasticsearch/scripts.rpmsave" + assert_file_exist "/etc/elasticsearch/scripts.rpmsave/script" + + assert_file_not_exist "/etc/init.d/elasticsearch" + assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" + + assert_file_not_exist "/etc/sysconfig/elasticsearch" +} + +@test "[RPM] finalize package removal" { + # cleanup + rm -rf /etc/elasticsearch } @test "[RPM] package has been removed again" { From 5be79ed02ceb9fbe6c867a888ada2af8d53fd8d2 Mon Sep 17 00:00:00 2001 From: Chris Earle Date: Mon, 25 Apr 2016 15:55:25 -0400 Subject: [PATCH 0117/1311] Add Failure Details to every NodesResponse Most of the current implementations of BaseNodesResponse (plural Nodes) ignore FailedNodeExceptions. 
- This adds a helper function to do the grouping to TransportNodesAction - Requires a non-null array of FailedNodeExceptions within the BaseNodesResponse constructor - Reads/writes the array to output - Also adds StreamInput and StreamOutput methods for generically reading and writing arrays --- .../elasticsearch/ElasticsearchException.java | 4 +- .../hotthreads/NodesHotThreadsResponse.java | 22 +-- .../TransportNodesHotThreadsAction.java | 22 +-- .../cluster/node/info/NodesInfoResponse.java | 26 +-- .../node/info/TransportNodesInfoAction.java | 25 ++- .../node/stats/NodesStatsResponse.java | 26 +-- .../node/stats/TransportNodesStatsAction.java | 26 ++- .../status/TransportNodesSnapshotsStatus.java | 75 ++------ .../cluster/stats/ClusterStatsIndices.java | 79 +-------- .../cluster/stats/ClusterStatsNodes.java | 161 +----------------- .../cluster/stats/ClusterStatsResponse.java | 77 +++------ .../stats/TransportClusterStatsAction.java | 19 +-- .../TransportIndicesShardStoresAction.java | 9 +- .../ingest/PutPipelineTransportAction.java | 2 +- .../support/nodes/BaseNodesResponse.java | 85 ++++++--- .../support/nodes/TransportNodesAction.java | 68 +++++++- .../cluster/InternalClusterInfoService.java | 2 +- .../cluster/health/ClusterHealthStatus.java | 24 ++- .../common/io/stream/StreamInput.java | 24 +++ .../common/io/stream/StreamOutput.java | 14 +- .../gateway/AsyncShardFetch.java | 18 +- .../org/elasticsearch/gateway/Gateway.java | 6 +- .../gateway/GatewayAllocator.java | 2 +- .../TransportNodesListGatewayMetaState.java | 59 ++----- ...ransportNodesListGatewayStartedShards.java | 60 ++----- .../TransportNodesListShardStoreMetaData.java | 61 ++----- .../hotthreads/RestNodesHotThreadsAction.java | 2 +- .../node/info/RestNodesInfoAction.java | 18 +- .../node/stats/RestNodesStatsAction.java | 5 +- .../cluster/stats/RestClusterStatsAction.java | 5 +- .../rest/action/support/RestActions.java | 156 +++++++++++++++-- .../action/admin/HotThreadsIT.java | 2 +- .../ClusterAllocationExplainIT.java | 4 +- .../node/tasks/CancellableTasksTests.java | 4 +- .../node/tasks/TaskManagerTestCase.java | 40 ++--- .../cluster/node/tasks/TestTaskPlugin.java | 40 ++--- .../nodes/TransportNodesActionTests.java | 85 ++++++--- .../ClusterStateBackwardsCompatIT.java | 4 +- .../elasticsearch/cluster/DiskUsageTests.java | 10 +- .../ack/AckClusterUpdateSettingsIT.java | 4 +- .../allocation/decider/MockDiskUsagesIT.java | 2 +- .../common/io/stream/BytesStreamsTests.java | 54 +++++- .../discovery/zen/ZenDiscoveryIT.java | 4 +- .../gateway/AsyncShardFetchTests.java | 7 +- .../gateway/RecoveryFromGatewayIT.java | 10 +- .../index/store/CorruptedFileIT.java | 8 +- .../index/store/CorruptedTranslogIT.java | 2 +- .../index/suggest/stats/SuggestStatsIT.java | 3 +- .../breaker/CircuitBreakerServiceIT.java | 4 +- .../indices/recovery/IndexRecoveryIT.java | 8 +- .../indices/stats/IndexStatsIT.java | 24 +-- .../nodesinfo/SimpleNodesInfoIT.java | 14 +- .../threadpool/SimpleThreadPoolIT.java | 4 +- .../netty/NettyTransportPublishAddressIT.java | 2 +- .../messy/tests/SearchStatsTests.java | 4 +- .../AbstractAzureComputeServiceTestCase.java | 2 +- .../MockInternalClusterInfoService.java | 4 +- .../elasticsearch/test/ESIntegTestCase.java | 4 +- .../org/elasticsearch/test/ExternalNode.java | 6 +- .../test/ExternalTestCluster.java | 6 +- 60 files changed, 726 insertions(+), 821 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 
3332bfed0c3..b242811b7be 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -47,7 +48,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VAL /** * A base class for all elasticsearch exceptions. */ -public class ElasticsearchException extends RuntimeException implements ToXContent { +public class ElasticsearchException extends RuntimeException implements ToXContent, Writeable { public static final String REST_EXCEPTION_SKIP_CAUSE = "rest.exception.cause.skip"; public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip"; @@ -235,6 +236,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte } } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(this.getMessage()); out.writeThrowable(this.getCause()); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java index 22d4795fc95..3136f2b6826 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java @@ -19,12 +19,14 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.List; /** */ @@ -33,26 +35,18 @@ public class NodesHotThreadsResponse extends BaseNodesResponse { NodesHotThreadsResponse() { } - public NodesHotThreadsResponse(ClusterName clusterName, NodeHotThreads[] nodes) { - super(clusterName, nodes); + public NodesHotThreadsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeHotThreads[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = NodeHotThreads.readNodeHotThreads(in); - } + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeHotThreads::readNodeHotThreads); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeHotThreads node : nodes) { - node.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index d53f651da45..7198851fd28 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; @@ -35,33 +36,28 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesHotThreadsAction extends TransportNodesAction { +public class TransportNodesHotThreadsAction extends TransportNodesAction { @Inject public TransportNodesHotThreadsAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, NodesHotThreadsAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC); + indexNameExpressionResolver, NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeHotThreads.class); } @Override - protected NodesHotThreadsResponse newResponse(NodesHotThreadsRequest request, AtomicReferenceArray responses) { - final List nodes = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeHotThreads) { - nodes.add((NodeHotThreads) resp); - } - } - return new NodesHotThreadsResponse(clusterName, nodes.toArray(new NodeHotThreads[nodes.size()])); + protected NodesHotThreadsResponse newResponse(NodesHotThreadsRequest request, + List responses, List failures) { + return new NodesHotThreadsResponse(clusterName, responses, failures); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index fdb15db8ffc..d5a43eb030e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -30,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; +import java.util.List; import java.util.Map; /** @@ -40,34 +42,24 @@ public class NodesInfoResponse extends BaseNodesResponse implements To public NodesInfoResponse() { } - public NodesInfoResponse(ClusterName clusterName, NodeInfo[] nodes) { - 
super(clusterName, nodes); + public NodesInfoResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeInfo[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = NodeInfo.readNodeInfo(in); - } + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeInfo::readNodeInfo); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeInfo node : nodes) { - node.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("cluster_name", getClusterName().value()); - builder.startObject("nodes"); - for (NodeInfo nodeInfo : this) { + for (NodeInfo nodeInfo : getNodes()) { builder.startObject(nodeInfo.getNode().getId()); builder.field("name", nodeInfo.getNode().getName()); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index f52729faa4f..f68e2d65903 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; @@ -34,36 +35,32 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesInfoAction extends TransportNodesAction { +public class TransportNodesInfoAction extends TransportNodesAction { private final NodeService nodeService; @Inject public TransportNodesInfoAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - NodeService nodeService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + NodeService nodeService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, NodesInfoAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, NodesInfoRequest::new, NodeInfoRequest::new, ThreadPool.Names.MANAGEMENT); + indexNameExpressionResolver, NodesInfoRequest::new, NodeInfoRequest::new, ThreadPool.Names.MANAGEMENT, NodeInfo.class); this.nodeService = nodeService; } @Override - protected NodesInfoResponse newResponse(NodesInfoRequest nodesInfoRequest, AtomicReferenceArray responses) { - final List nodesInfos = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeInfo) { - nodesInfos.add((NodeInfo) resp); - } - } - return new NodesInfoResponse(clusterName, nodesInfos.toArray(new 
NodeInfo[nodesInfos.size()])); + protected NodesInfoResponse newResponse(NodesInfoRequest nodesInfoRequest, + List responses, List failures) { + return new NodesInfoResponse(clusterName, responses, failures); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index af28c1fb5d5..1a9023ab93c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; +import java.util.List; /** * @@ -37,34 +39,24 @@ public class NodesStatsResponse extends BaseNodesResponse implements NodesStatsResponse() { } - public NodesStatsResponse(ClusterName clusterName, NodeStats[] nodes) { - super(clusterName, nodes); + public NodesStatsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeStats[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = NodeStats.readNodeStats(in); - } + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeStats::readNodeStats); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeStats node : nodes) { - node.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("cluster_name", getClusterName().value()); - builder.startObject("nodes"); - for (NodeStats nodeStats : this) { + for (NodeStats nodeStats : getNodes()) { builder.startObject(nodeStats.getNode().getId()); builder.field("timestamp", nodeStats.getTimestamp()); nodeStats.toXContent(builder, params); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 8ba3d00558b..d61e3f1acce 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; @@ -34,36 +35,31 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import 
java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesStatsAction extends TransportNodesAction { +public class TransportNodesStatsAction extends TransportNodesAction { private final NodeService nodeService; @Inject public TransportNodesStatsAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - NodeService nodeService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, NodesStatsAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - NodesStatsRequest::new, NodeStatsRequest::new, ThreadPool.Names.MANAGEMENT); + NodeService nodeService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, NodesStatsAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, NodesStatsRequest::new, NodeStatsRequest::new, ThreadPool.Names.MANAGEMENT, NodeStats.class); this.nodeService = nodeService; } @Override - protected NodesStatsResponse newResponse(NodesStatsRequest nodesInfoRequest, AtomicReferenceArray responses) { - final List nodeStats = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeStats) { - nodeStats.add((NodeStats) resp); - } - } - return new NodesStatsResponse(clusterName, nodeStats.toArray(new NodeStats[nodeStats.size()])); + protected NodesStatsResponse newResponse(NodesStatsRequest request, List responses, List failures) { + return new NodesStatsResponse(clusterName, responses, failures); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index fb6310a45bf..bc139389460 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -43,18 +43,20 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicReferenceArray; import static java.util.Collections.unmodifiableMap; /** * Transport client that collects snapshot shard statuses from data nodes */ -public class TransportNodesSnapshotsStatus extends TransportNodesAction { +public class TransportNodesSnapshotsStatus extends TransportNodesAction { public static final String ACTION_NAME = SnapshotsStatusAction.NAME + "[nodes]"; @@ -66,7 +68,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction nodesList = new ArrayList<>(); - final List failures = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeSnapshotStatus) { // will also filter out null response for unallocated ones - nodesList.add((NodeSnapshotStatus) resp); - } else if (resp instanceof FailedNodeException) { - failures.add((FailedNodeException) resp); - } else { - logger.warn("unknown response type [{}], expected NodeSnapshotStatus or FailedNodeException", 
resp); - } - } - return new NodesSnapshotStatus(clusterName, nodesList.toArray(new NodeSnapshotStatus[nodesList.size()]), - failures.toArray(new FailedNodeException[failures.size()])); + protected NodesSnapshotStatus newResponse(Request request, List responses, List failures) { + return new NodesSnapshotStatus(clusterName, responses, failures); } @Override @@ -169,75 +158,47 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction { - private FailedNodeException[] failures; - NodesSnapshotStatus() { } - public NodesSnapshotStatus(ClusterName clusterName, NodeSnapshotStatus[] nodes, FailedNodeException[] failures) { - super(clusterName, nodes); - this.failures = failures; + public NodesSnapshotStatus(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public FailedNodeException[] failures() { - return failures; + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(NodeSnapshotStatus::new); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeSnapshotStatus[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = new NodeSnapshotStatus(); - nodes[i].readFrom(in); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeSnapshotStatus response : nodes) { - response.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } } public static class NodeRequest extends BaseNodeRequest { - private SnapshotId[] snapshotIds; + private List snapshotIds; public NodeRequest() { } NodeRequest(String nodeId, TransportNodesSnapshotsStatus.Request request) { super(nodeId); - snapshotIds = request.snapshotIds; + snapshotIds = Arrays.asList(request.snapshotIds); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - int n = in.readVInt(); - snapshotIds = new SnapshotId[n]; - for (int i = 0; i < n; i++) { - snapshotIds[i] = SnapshotId.readSnapshotId(in); - } + snapshotIds = in.readList(SnapshotId::readSnapshotId); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (snapshotIds != null) { - out.writeVInt(snapshotIds.length); - for (int i = 0; i < snapshotIds.length; i++) { - snapshotIds[i].writeTo(out); - } - } else { - out.writeVInt(0); - } + out.writeStreamableList(snapshotIds); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index b23b6467288..8c0c427beea 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -22,9 +22,6 @@ package org.elasticsearch.action.admin.cluster.stats; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.cache.query.QueryCacheStats; @@ -36,8 
+33,9 @@ import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import java.io.IOException; +import java.util.List; -public class ClusterStatsIndices implements ToXContent, Streamable { +public class ClusterStatsIndices implements ToXContent { private int indexCount; private ShardStats shards; @@ -49,10 +47,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { private SegmentsStats segments; private PercolatorQueryCacheStats percolatorCache; - private ClusterStatsIndices() { - } - - public ClusterStatsIndices(ClusterStatsNodeResponse[] nodeResponses) { + public ClusterStatsIndices(List nodeResponses) { ObjectObjectHashMap countsPerIndex = new ObjectObjectHashMap<>(); this.docs = new DocsStats(); @@ -131,38 +126,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable { return percolatorCache; } - @Override - public void readFrom(StreamInput in) throws IOException { - indexCount = in.readVInt(); - shards = ShardStats.readShardStats(in); - docs = DocsStats.readDocStats(in); - store = StoreStats.readStoreStats(in); - fieldData = FieldDataStats.readFieldDataStats(in); - queryCache = QueryCacheStats.readQueryCacheStats(in); - completion = CompletionStats.readCompletionStats(in); - segments = SegmentsStats.readSegmentsStats(in); - percolatorCache = PercolatorQueryCacheStats.readPercolateStats(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(indexCount); - shards.writeTo(out); - docs.writeTo(out); - store.writeTo(out); - fieldData.writeTo(out); - queryCache.writeTo(out); - completion.writeTo(out); - segments.writeTo(out); - percolatorCache.writeTo(out); - } - - public static ClusterStatsIndices readIndicesStats(StreamInput in) throws IOException { - ClusterStatsIndices indicesStats = new ClusterStatsIndices(); - indicesStats.readFrom(in); - return indicesStats; - } - static final class Fields { static final String COUNT = "count"; } @@ -181,7 +144,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable { return builder; } - public static class ShardStats implements ToXContent, Streamable { + public static class ShardStats implements ToXContent { int indices; int total; @@ -326,40 +289,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable { } } - public static ShardStats readShardStats(StreamInput in) throws IOException { - ShardStats c = new ShardStats(); - c.readFrom(in); - return c; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - indices = in.readVInt(); - total = in.readVInt(); - primaries = in.readVInt(); - minIndexShards = in.readVInt(); - maxIndexShards = in.readVInt(); - minIndexPrimaryShards = in.readVInt(); - maxIndexPrimaryShards = in.readVInt(); - minIndexReplication = in.readDouble(); - totalIndexReplication = in.readDouble(); - maxIndexReplication = in.readDouble(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(indices); - out.writeVInt(total); - out.writeVInt(primaries); - out.writeVInt(minIndexShards); - out.writeVInt(maxIndexShards); - out.writeVInt(minIndexPrimaryShards); - out.writeVInt(maxIndexPrimaryShards); - out.writeDouble(minIndexReplication); - out.writeDouble(totalIndexReplication); - out.writeDouble(maxIndexReplication); - } - static final class Fields { static final String SHARDS = "shards"; static final String TOTAL = "total"; diff --git 
a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index 05b7753ef3a..017b4481240 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -26,9 +26,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; @@ -48,7 +45,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -public class ClusterStatsNodes implements ToXContent, Writeable { +public class ClusterStatsNodes implements ToXContent { private final Counts counts; private final Set versions; @@ -58,33 +55,12 @@ public class ClusterStatsNodes implements ToXContent, Writeable { private final FsInfo.Path fs; private final Set plugins; - ClusterStatsNodes(StreamInput in) throws IOException { - this.counts = new Counts(in); - - int size = in.readVInt(); - this.versions = new HashSet<>(size); - for (int i = 0; i < size; i++) { - this.versions.add(Version.readVersion(in)); - } - - this.os = new OsStats(in); - this.process = new ProcessStats(in); - this.jvm = new JvmStats(in); - this.fs = new FsInfo.Path(in); - - size = in.readVInt(); - this.plugins = new HashSet<>(size); - for (int i = 0; i < size; i++) { - this.plugins.add(PluginInfo.readFromStream(in)); - } - } - - ClusterStatsNodes(ClusterStatsNodeResponse[] nodeResponses) { + ClusterStatsNodes(List nodeResponses) { this.versions = new HashSet<>(); this.fs = new FsInfo.Path(); this.plugins = new HashSet<>(); - Set seenAddresses = new HashSet<>(nodeResponses.length); + Set seenAddresses = new HashSet<>(nodeResponses.size()); List nodeInfos = new ArrayList<>(); List nodeStats = new ArrayList<>(); for (ClusterStatsNodeResponse nodeResponse : nodeResponses) { @@ -140,21 +116,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { return plugins; } - @Override - public void writeTo(StreamOutput out) throws IOException { - counts.writeTo(out); - out.writeVInt(versions.size()); - for (Version v : versions) Version.writeVersion(v, out); - os.writeTo(out); - process.writeTo(out); - jvm.writeTo(out); - fs.writeTo(out); - out.writeVInt(plugins.size()); - for (PluginInfo p : plugins) { - p.writeTo(out); - } - } - static final class Fields { static final String COUNT = "count"; static final String VERSIONS = "versions"; @@ -200,18 +161,12 @@ public class ClusterStatsNodes implements ToXContent, Writeable { return builder; } - public static class Counts implements Writeable, ToXContent { + public static class Counts implements ToXContent { static final String COORDINATING_ONLY = "coordinating_only"; private final int total; private final Map roles; - @SuppressWarnings("unchecked") - private Counts(StreamInput in) throws IOException { - this.total = in.readVInt(); - this.roles = (Map)in.readGenericValue(); - } - private Counts(List nodeInfos) { this.roles = new HashMap<>(); for (DiscoveryNode.Role 
role : DiscoveryNode.Role.values()) { @@ -243,12 +198,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { return roles; } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(total); - out.writeGenericValue(roles); - } - static final class Fields { static final String TOTAL = "total"; } @@ -263,7 +212,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable { } } - public static class OsStats implements ToXContent, Writeable { + public static class OsStats implements ToXContent { final int availableProcessors; final int allocatedProcessors; final ObjectIntHashMap names; @@ -287,30 +236,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { this.allocatedProcessors = allocatedProcessors; } - /** - * Read from a stream. - */ - private OsStats(StreamInput in) throws IOException { - this.availableProcessors = in.readVInt(); - this.allocatedProcessors = in.readVInt(); - int size = in.readVInt(); - this.names = new ObjectIntHashMap<>(); - for (int i = 0; i < size; i++) { - names.addTo(in.readString(), in.readVInt()); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(availableProcessors); - out.writeVInt(allocatedProcessors); - out.writeVInt(names.size()); - for (ObjectIntCursor name : names) { - out.writeString(name.key); - out.writeVInt(name.value); - } - } - public int getAvailableProcessors() { return availableProcessors; } @@ -343,7 +268,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable { } } - public static class ProcessStats implements ToXContent, Writeable { + public static class ProcessStats implements ToXContent { final int count; final int cpuPercent; @@ -384,27 +309,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { this.maxOpenFileDescriptors = maxOpenFileDescriptors; } - /** - * Read from a stream. - */ - private ProcessStats(StreamInput in) throws IOException { - this.count = in.readVInt(); - this.cpuPercent = in.readVInt(); - this.totalOpenFileDescriptors = in.readVLong(); - this.minOpenFileDescriptors = in.readLong(); - this.maxOpenFileDescriptors = in.readLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(count); - out.writeVInt(cpuPercent); - out.writeVLong(totalOpenFileDescriptors); - out.writeLong(minOpenFileDescriptors); - out.writeLong(maxOpenFileDescriptors); - } - - /** * Cpu usage in percentages - 100 is 1 core. */ @@ -456,7 +360,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable { } } - public static class JvmStats implements Writeable, ToXContent { + public static class JvmStats implements ToXContent { private final ObjectIntHashMap versions; private final long threads; @@ -497,34 +401,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { this.heapMax = heapMax; } - /** - * Read from a stream. 
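The stream constructors and writeTo methods being deleted throughout ClusterStatsNodes all follow from one design choice in this patch: these objects are derived data, recomputed on the receiving side from the list of per-node responses rather than serialized. A hedged sketch of the idea, mirroring the constructor above (the nodeInfo()/getVersion() accessors are assumed from the surrounding hunks):

    // Illustrative: aggregates are rebuilt from the raw node responses, so the
    // Counts/OsStats/ProcessStats/JvmStats classes no longer need wire formats.
    static Set<Version> collectVersions(List<ClusterStatsNodeResponse> nodeResponses) {
        Set<Version> versions = new HashSet<>();
        for (ClusterStatsNodeResponse nodeResponse : nodeResponses) {
            versions.add(nodeResponse.nodeInfo().getVersion());
        }
        return versions;
    }
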
- */ - private JvmStats(StreamInput in) throws IOException { - int size = in.readVInt(); - this.versions = new ObjectIntHashMap<>(size); - for (int i = 0; i < size; i++) { - this.versions.addTo(new JvmVersion(in), in.readVInt()); - } - this.threads = in.readVLong(); - this.maxUptime = in.readVLong(); - this.heapUsed = in.readVLong(); - this.heapMax = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(versions.size()); - for (ObjectIntCursor v : versions) { - v.key.writeTo(out); - out.writeVInt(v.value); - } - out.writeVLong(threads); - out.writeVLong(maxUptime); - out.writeVLong(heapUsed); - out.writeVLong(heapMax); - } - public ObjectIntHashMap getVersions() { return versions; } @@ -598,7 +474,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable { } } - public static class JvmVersion implements Writeable { + public static class JvmVersion { String version; String vmName; String vmVersion; @@ -611,27 +487,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable { vmVendor = jvmInfo.getVmVendor(); } - /** - * Read from a stream. - */ - JvmVersion(StreamInput in) throws IOException { - version = in.readString(); - vmName = in.readString(); - vmVersion = in.readString(); - vmVendor = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(version); - out.writeString(vmName); - out.writeString(vmVersion); - out.writeString(vmVendor); - } - - JvmVersion() { - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index c272e6d6fbe..efc72d104f8 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -29,9 +30,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import java.io.IOException; -import java.util.Iterator; +import java.util.List; import java.util.Locale; -import java.util.Map; /** * @@ -48,8 +48,9 @@ public class ClusterStatsResponse extends BaseNodesResponse nodes, List failures) { + super(clusterName, nodes, failures); this.timestamp = timestamp; this.clusterUUID = clusterUUID; nodesStats = new ClusterStatsNodes(nodes); @@ -79,77 +80,53 @@ public class ClusterStatsResponse extends BaseNodesResponse getNodesMap() { - throw new UnsupportedOperationException(); - } - - @Override - public ClusterStatsNodeResponse getAt(int position) { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator iterator() { - throw new UnsupportedOperationException(); - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); timestamp = in.readVLong(); - status = null; - if (in.readBoolean()) { - // it may be that the master switched on us while doing the operation. In this case the status may be null. 
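The manual boolean-flag encoding being removed from ClusterStatsResponse here is replaced by the optional-writeable helpers, which encode the presence flag and the value in one call. A minimal sketch of the round trip, using only calls that appear in this patch:

    // Write side: a null status is encoded as a single false flag.
    out.writeOptionalWriteable(status);

    // Read side: returns null when the flag was false, otherwise delegates to readFrom.
    ClusterHealthStatus status = in.readOptionalWriteable(ClusterHealthStatus::readFrom);
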
- status = ClusterHealthStatus.fromValue(in.readByte()); - } clusterUUID = in.readString(); - nodesStats = new ClusterStatsNodes(in); - indicesStats = ClusterStatsIndices.readIndicesStats(in); + // it may be that the master switched on us while doing the operation. In this case the status may be null. + status = in.readOptionalWriteable(ClusterHealthStatus::readFrom); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVLong(timestamp); - if (status == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeByte(status.value()); - } out.writeString(clusterUUID); - nodesStats.writeTo(out); - indicesStats.writeTo(out); + out.writeOptionalWriteable(status); } - static final class Fields { - static final String NODES = "nodes"; - static final String INDICES = "indices"; - static final String UUID = "uuid"; - static final String CLUSTER_NAME = "cluster_name"; - static final String STATUS = "status"; + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + List nodes = in.readList(ClusterStatsNodeResponse::readNodeResponse); + + // built from nodes rather than from the stream directly + nodesStats = new ClusterStatsNodes(nodes); + indicesStats = new ClusterStatsIndices(nodes); + + return nodes; + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + // nodeStats and indicesStats are rebuilt from nodes + out.writeStreamableList(nodes); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("timestamp", getTimestamp()); - builder.field(Fields.CLUSTER_NAME, getClusterName().value()); if (params.paramAsBoolean("output_uuid", false)) { - builder.field(Fields.UUID, clusterUUID); + builder.field("uuid", clusterUUID); } if (status != null) { - builder.field(Fields.STATUS, status.name().toLowerCase(Locale.ROOT)); + builder.field("status", status.name().toLowerCase(Locale.ROOT)); } - builder.startObject(Fields.INDICES); + builder.startObject("indices"); indicesStats.toXContent(builder, params); builder.endObject(); - builder.startObject(Fields.NODES); + builder.startObject("nodes"); nodesStats.toXContent(builder, params); builder.endObject(); return builder; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 4a0eb33c0b5..bae7b20694d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; @@ -46,7 +47,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * @@ -68,22 +68,17 @@ public class TransportClusterStatsAction extends TransportNodesAction nodeStats = new ArrayList<>(responses.length()); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof ClusterStatsNodeResponse) { 
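Since failures() and hasFailures() are now part of the base response (see the BaseNodesResponse hunks later in this section), callers can check for partial results uniformly; Gateway.java below does exactly this. A client-side usage sketch, assuming the existing cluster admin API:

    // Usage sketch: consuming a nodes-level response that may be partially successful.
    ClusterStatsResponse response = client.admin().cluster().clusterStats(new ClusterStatsRequest()).actionGet();
    if (response.hasFailures()) {
        for (FailedNodeException failure : response.failures()) {
            logger.warn("node [{}] failed to report cluster stats", failure.nodeId());
        }
    }
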
- nodeStats.add((ClusterStatsNodeResponse) resp); - } - } - return new ClusterStatsResponse(System.currentTimeMillis(), clusterName, - clusterService.state().metaData().clusterUUID(), nodeStats.toArray(new ClusterStatsNodeResponse[nodeStats.size()])); + protected ClusterStatsResponse newResponse(ClusterStatsRequest request, + List responses, List failures) { + return new ClusterStatsResponse(System.currentTimeMillis(), clusterName, clusterService.state().metaData().clusterUUID(), + responses, failures); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 5dbac12f694..250e4123bba 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -53,6 +53,7 @@ import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; @@ -152,7 +153,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc } @Override - protected synchronized void processAsyncFetch(ShardId shardId, NodeGatewayStartedShards[] responses, FailedNodeException[] failures) { + protected synchronized void processAsyncFetch(ShardId shardId, List responses, List failures) { fetchResponses.add(new Response(shardId, responses, failures)); if (expectedOps.countDown()) { finish(); @@ -220,10 +221,10 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc public class Response { private final ShardId shardId; - private final NodeGatewayStartedShards[] responses; - private final FailedNodeException[] failures; + private final List responses; + private final List failures; - public Response(ShardId shardId, NodeGatewayStartedShards[] responses, FailedNodeException[] failures) { + public Response(ShardId shardId, List responses, List failures) { this.shardId = shardId; this.responses = responses; this.failures = failures; diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index e1a34413e2c..bea5a2c8bc3 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -80,7 +80,7 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction ingestInfos = new HashMap<>(); - for (NodeInfo nodeInfo : nodeInfos) { + for (NodeInfo nodeInfo : nodeInfos.getNodes()) { ingestInfos.put(nodeInfo.getNode(), nodeInfo.getIngest()); } pipelineStore.put(clusterService, ingestInfos, request, listener); diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java index 01401bc7c6e..a49864154db 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java @@ -22,61 +22,77 @@ package org.elasticsearch.action.support.nodes; import org.elasticsearch.action.ActionResponse; 
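The rewritten BaseNodesResponse below fixes the nodes/failures contract for every subclass: store both lists, expose them, and delegate (de)serialization of the node list to two hooks. A minimal sketch of a concrete subclass, with a hypothetical NodePing node-level response:

    // Sketch: NodePing is a hypothetical BaseNodeResponse subclass.
    public class NodesPingResponse extends BaseNodesResponse<NodePing> {

        NodesPingResponse() { }    // for deserialization

        public NodesPingResponse(ClusterName clusterName, List<NodePing> nodes, List<FailedNodeException> failures) {
            super(clusterName, nodes, failures);
        }

        @Override
        protected List<NodePing> readNodesFrom(StreamInput in) throws IOException {
            return in.readStreamableList(NodePing::new);    // helper added to StreamInput in this patch
        }

        @Override
        protected void writeNodesTo(StreamOutput out, List<NodePing> nodes) throws IOException {
            out.writeStreamableList(nodes);
        }
    }
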
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.HashMap; -import java.util.Iterator; +import java.util.List; import java.util.Map; +import java.util.Objects; /** * */ -public abstract class BaseNodesResponse extends ActionResponse implements Iterable { +public abstract class BaseNodesResponse extends ActionResponse { private ClusterName clusterName; - protected TNodeResponse[] nodes; + private List failures; + private List nodes; private Map nodesMap; protected BaseNodesResponse() { } - protected BaseNodesResponse(ClusterName clusterName, TNodeResponse[] nodes) { - this.clusterName = clusterName; - this.nodes = nodes; + protected BaseNodesResponse(ClusterName clusterName, List nodes, List failures) { + this.clusterName = Objects.requireNonNull(clusterName); + this.failures = Objects.requireNonNull(failures); + this.nodes = Objects.requireNonNull(nodes); } /** - * The failed nodes, if set to be captured. + * Get the {@link ClusterName} associated with all of the nodes. + * + * @return Never {@code null}. */ - @Nullable - public FailedNodeException[] failures() { - return null; - } - public ClusterName getClusterName() { - return this.clusterName; + return clusterName; } - public String getClusterNameAsString() { - return this.clusterName.value(); + /** + * Get the failed node exceptions. + * + * @return Never {@code null}. Can be empty. + */ + public List failures() { + return failures; } - public TNodeResponse[] getNodes() { + /** + * Determine if there are any node failures in {@link #failures}. + * + * @return {@code true} if {@link #failures} contains at least 1 {@link FailedNodeException}. + */ + public boolean hasFailures() { + return failures.isEmpty() == false; + } + + /** + * Get the successful node responses. + * + * @return Never {@code null}. Can be empty. + * @see #hasFailures() + */ + public List getNodes() { return nodes; } - public TNodeResponse getAt(int position) { - return nodes[position]; - } - - @Override - public Iterator iterator() { - return getNodesMap().values().iterator(); - } - + /** + * Lazily build and get a map of Node ID to node response. + * + * @return Never {@code null}. Can be empty. + * @see #getNodes() + */ public Map getNodesMap() { if (nodesMap == null) { nodesMap = new HashMap<>(); @@ -91,11 +107,28 @@ public abstract class BaseNodesResponse public void readFrom(StreamInput in) throws IOException { super.readFrom(in); clusterName = ClusterName.readClusterName(in); + nodes = readNodesFrom(in); + failures = in.readList(FailedNodeException::new); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); clusterName.writeTo(out); + writeNodesTo(out, nodes); + out.writeList(failures); } + + /** + * Read the {@link #nodes} from the stream. + * + * @return Never {@code null}. + */ + protected abstract List readNodesFrom(StreamInput in) throws IOException; + + /** + * Write the {@link #nodes} to the stream. 
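Because readFrom/writeTo in the hunk above delegate the node list to the new hooks, a response now round-trips completely, failures included. A test-style sketch, assuming the usual BytesStreamOutput round trip:

    // Round-trip sketch; NodesInfoResponse is one of the subclasses converted in this patch.
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);                        // cluster name, nodes (writeNodesTo), failures

    NodesInfoResponse copy = new NodesInfoResponse();
    copy.readFrom(out.bytes().streamInput());     // nodes restored via readNodesFrom
    assert copy.getNodes().size() == original.getNodes().size();
    assert copy.failures().size() == original.failures().size();
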
+ */ + protected abstract void writeNodesTo(StreamOutput out, List nodes) throws IOException; + } diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index c996d530dce..2767bc80bf3 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -43,6 +43,9 @@ import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; @@ -50,22 +53,30 @@ import java.util.function.Supplier; /** * */ -public abstract class TransportNodesAction, NodesResponse extends BaseNodesResponse, NodeRequest extends BaseNodeRequest, NodeResponse extends BaseNodeResponse> extends HandledTransportAction { +public abstract class TransportNodesAction, + NodesResponse extends BaseNodesResponse, + NodeRequest extends BaseNodeRequest, + NodeResponse extends BaseNodeResponse> + extends HandledTransportAction { protected final ClusterName clusterName; protected final ClusterService clusterService; protected final TransportService transportService; + protected final Class nodeResponseClass; final String transportNodeAction; protected TransportNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier nodeRequest, - String nodeExecutor) { + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier request, Supplier nodeRequest, + String nodeExecutor, + Class nodeResponseClass) { super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); - this.clusterName = clusterName; - this.clusterService = clusterService; - this.transportService = transportService; + this.clusterName = Objects.requireNonNull(clusterName); + this.clusterService = Objects.requireNonNull(clusterService); + this.transportService = Objects.requireNonNull(transportService); + this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); this.transportNodeAction = actionName + "[n]"; @@ -87,7 +98,46 @@ public abstract class TransportNodesAction responses = new ArrayList<>(); + final List failures = new ArrayList<>(); + + for (int i = 0; i < nodesResponses.length(); ++i) { + Object response = nodesResponses.get(i); + + if (nodeResponseClass.isInstance(response)) { + responses.add(nodeResponseClass.cast(response)); + } else if (response instanceof FailedNodeException) { + failures.add((FailedNodeException)response); + } else { + logger.warn("ignoring unexpected response [{}] of type [{}], expected [{}] or [{}]", + response, response != null ? response.getClass().getName() : null, + nodeResponseClass.getSimpleName(), FailedNodeException.class.getSimpleName()); + } + } + + return newResponse(request, responses, failures); + } + + /** + * Create a new {@link NodesResponse} (multi-node response). + * + * @param request The associated request. + * @param responses All successful node-level responses. 
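The class-token plumbing above exists so the base class can filter the raw per-node results without unchecked casts; each concrete action now passes its node-response class alongside the request suppliers. A hypothetical end-to-end wiring sketch (the action name and Ping types are illustrative, not from this patch):

    // Hypothetical skeleton showing the new constructor contract.
    public class TransportNodesPingAction extends TransportNodesAction<PingsRequest, PingsResponse, PingRequest, NodePing> {
        @Inject
        public TransportNodesPingAction(Settings settings, ClusterName clusterName, ThreadPool threadPool,
                                        ClusterService clusterService, TransportService transportService,
                                        ActionFilters actionFilters, IndexNameExpressionResolver resolver) {
            super(settings, "cluster:monitor/nodes/ping", clusterName, threadPool, clusterService, transportService,
                    actionFilters, resolver, PingsRequest::new, PingRequest::new, ThreadPool.Names.GENERIC,
                    NodePing.class);    // the class token used by the isInstance/cast filtering above
        }
        // newResponse / newNodeRequest / newNodeResponse / nodeOperation omitted; see the sketch
        // after the TransportNodesHotThreadsAction hunk near the top of this section.
    }
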
+ * @param failures All node-level failures. + * @return Never {@code null}. + * @throws NullPointerException if any parameter is {@code null}. + */ + protected abstract NodesResponse newResponse(NodesRequest request, List responses, List failures); protected abstract NodeRequest newNodeRequest(String nodeId, NodesRequest request); @@ -165,7 +215,8 @@ public abstract class TransportNodesAction() { + transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), + new BaseTransportResponseHandler() { @Override public NodeResponse newInstance() { return newNodeResponse(); @@ -238,4 +289,5 @@ public abstract class TransportNodesAction nodeStatsArray, ImmutableOpenMap.Builder newLeastAvaiableUsages, ImmutableOpenMap.Builder newMostAvaiableUsages) { for (NodeStats nodeStats : nodeStatsArray) { diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterHealthStatus.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterHealthStatus.java index 6d3e136eb1a..a261d28f537 100644 --- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterHealthStatus.java +++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterHealthStatus.java @@ -20,10 +20,16 @@ package org.elasticsearch.cluster.health; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + /** * */ -public enum ClusterHealthStatus { +public enum ClusterHealthStatus implements Writeable { GREEN((byte) 0), YELLOW((byte) 1), RED((byte) 2); @@ -38,7 +44,21 @@ public enum ClusterHealthStatus { return value; } - public static ClusterHealthStatus fromValue(byte value) { + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(value); + } + + /** + * Read from a stream. + * + * @throws IllegalArgumentException if the value is unrecognized + */ + public static ClusterHealthStatus readFrom(StreamInput in) throws IOException { + return fromValue(in.readByte()); + } + + public static ClusterHealthStatus fromValue(byte value) throws IOException { switch (value) { case 0: return GREEN; diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 8df43f687f6..ec56103e7a0 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -736,6 +736,29 @@ public abstract class StreamInput extends InputStream { return null; } + /** + * Read a {@link List} of {@link Streamable} objects, using the {@code constructor} to instantiate each instance. + *
+     * This is expected to take the form:
+     *
+     * List<MyStreamableClass> list = in.readStreamableList(MyStreamableClass::new);
+     *
+     * @param constructor Streamable instance creator
+     * @return Never {@code null}.
+     * @throws IOException if any step fails
+     */
+    public <T extends Streamable> List<T> readStreamableList(Supplier<T> constructor) throws IOException {
+        int count = readVInt();
+        List<T> builder = new ArrayList<>(count);
+        for (int i = 0; i < count; i++) {
+            T instance = constructor.get();
+            instance.readFrom(this);
+            builder.add(instance);
+        }
+        return builder;
+    }
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+    /**
+     * Writes a list of {@link Streamable} objects
+     */
+    public void writeStreamableList(List<? extends Streamable> list) throws IOException {
+        writeVInt(list.size());
+        for (Streamable obj: list) {
+            obj.writeTo(this);
+        }
+    }
+
     /**
      * Writes a list of {@link Writeable} objects
      */
-    public <T extends Writeable> void writeList(List<T> list) throws IOException {
+    public void writeList(List<? extends Writeable> list) throws IOException {
         writeVInt(list.size());
-        for (T obj: list) {
+        for (Writeable obj: list) {
             obj.writeTo(this);
         }
     }
diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
index cd6268d04ec..b74507a4acc 100644
--- a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
+++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
@@ -35,9 +35,11 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.transport.ReceiveTimeoutTransportException;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -57,24 +59,24 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Releasable {
     /**
      * An action that lists the relevant shard data that needs to be fetched.
      */
-    public interface List<NodesResponse extends BaseNodesResponse<NodeResponse>, NodeResponse extends BaseNodeResponse> {
+    public interface Lister<NodesResponse extends BaseNodesResponse<NodeResponse>, NodeResponse extends BaseNodeResponse> {
         void list(ShardId shardId, String[] nodesIds, ActionListener<NodesResponse> listener);
     }
 
     protected final ESLogger logger;
     protected final String type;
     private final ShardId shardId;
-    private final List<BaseNodesResponse<T>, T> action;
+    private final Lister<BaseNodesResponse<T>, T> action;
     private final Map<String, NodeEntry<T>> cache = new HashMap<>();
     private final Set<String> nodesToIgnore = new HashSet<>();
     private boolean closed;
 
     @SuppressWarnings("unchecked")
-    protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, List<? extends BaseNodesResponse<T>, T> action) {
+    protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, Lister<? extends BaseNodesResponse<T>, T> action) {
         this.logger = logger;
         this.type = type;
         this.shardId = shardId;
-        this.action = (List<BaseNodesResponse<T>, T>) action;
+        this.action = (Lister<BaseNodesResponse<T>, T>) action;
     }
 
     @Override
@@ -167,7 +169,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Releasable {
      * the shard (response + failures), issuing a reroute at the end of it to make sure there will be another round
      * of allocations taking this new data into account.
*/ - protected synchronized void processAsyncFetch(ShardId shardId, T[] responses, FailedNodeException[] failures) { + protected synchronized void processAsyncFetch(ShardId shardId, List responses, List failures) { if (closed) { // we are closed, no need to process this async fetch at all logger.trace("{} ignoring fetched [{}] results, already closed", shardId, type); @@ -276,9 +278,9 @@ public abstract class AsyncShardFetch implements Rel @Override public void onFailure(Throwable e) { - FailedNodeException[] failures = new FailedNodeException[nodesIds.length]; - for (int i = 0; i < failures.length; i++) { - failures[i] = new FailedNodeException(nodesIds[i], "total failure in fetching", e); + List failures = new ArrayList<>(nodesIds.length); + for (String nodeId : nodesIds) { + failures.add(new FailedNodeException(nodeId, "total failure in fetching", e)); } processAsyncFetch(shardId, null, failures); } diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index 4da789b43fc..0e6b1959279 100644 --- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -82,7 +82,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { int requiredAllocation = Math.max(1, minimumMasterNodesProvider.get()); - if (nodesState.failures().length > 0) { + if (nodesState.hasFailures()) { for (FailedNodeException failedNodeException : nodesState.failures()) { logger.warn("failed to fetch state from node", failedNodeException); } @@ -91,7 +91,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { ObjectFloatHashMap indices = new ObjectFloatHashMap<>(); MetaData electedGlobalState = null; int found = 0; - for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) { + for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState.getNodes()) { if (nodeState.metaData() == null) { continue; } @@ -119,7 +119,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { Index index = (Index) keys[i]; IndexMetaData electedIndexMetaData = null; int indexMetaDataCount = 0; - for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) { + for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState.getNodes()) { if (nodeState.metaData() == null) { continue; } diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index e76e8085e86..15cd0e2bf63 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -125,7 +125,7 @@ public class GatewayAllocator extends AbstractComponent { class InternalAsyncFetch extends AsyncShardFetch { - public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List, T> action) { + public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, Lister, T> action) { super(logger, type, shardId, action); } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 0fd1fd35809..2d0b894939d 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ 
b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -43,14 +43,15 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListGatewayMetaState extends TransportNodesAction { +public class TransportNodesListGatewayMetaState extends TransportNodesAction { public static final String ACTION_NAME = "internal:gateway/local/meta_state"; @@ -61,7 +62,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction nodesList = new ArrayList<>(); - final List failures = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeGatewayMetaState) { // will also filter out null response for unallocated ones - nodesList.add((NodeGatewayMetaState) resp); - } else if (resp instanceof FailedNodeException) { - failures.add((FailedNodeException) resp); - } else { - logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp); - } - } - return new NodesGatewayMetaState(clusterName, nodesList.toArray(new NodeGatewayMetaState[nodesList.size()]), - failures.toArray(new FailedNodeException[failures.size()])); + protected NodesGatewayMetaState newResponse(Request request, List responses, List failures) { + return new NodesGatewayMetaState(clusterName, responses, failures); } @Override @@ -142,47 +130,30 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction { - private FailedNodeException[] failures; - NodesGatewayMetaState() { } - public NodesGatewayMetaState(ClusterName clusterName, NodeGatewayMetaState[] nodes, FailedNodeException[] failures) { - super(clusterName, nodes); - this.failures = failures; - } - - public FailedNodeException[] failures() { - return failures; + public NodesGatewayMetaState(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeGatewayMetaState[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = new NodeGatewayMetaState(); - nodes[i].readFrom(in); - } + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(NodeGatewayMetaState::new); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeGatewayMetaState response : nodes) { - response.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } } - public static class NodeRequest extends BaseNodeRequest { public NodeRequest() { } - NodeRequest(String nodeId, TransportNodesListGatewayMetaState.Request request) { + NodeRequest(String nodeId) { super(nodeId); } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index bdeb6d1660f..675c0088082 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -48,9 +48,7 @@ import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * This transport action is used to fetch the shard version from each node during primary allocation in {@link GatewayAllocator}. @@ -63,7 +61,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesListGatewayStartedShards.NodeRequest, TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> implements - AsyncShardFetch.List { public static final String ACTION_NAME = "internal:gateway/local/started_shards"; @@ -77,7 +75,8 @@ public class TransportNodesListGatewayStartedShards extends IndexNameExpressionResolver indexNameExpressionResolver, NodeEnvironment env) { super(settings, ACTION_NAME, clusterName, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, NodeRequest::new, ThreadPool.Names.FETCH_SHARD_STARTED); + indexNameExpressionResolver, Request::new, NodeRequest::new, ThreadPool.Names.FETCH_SHARD_STARTED, + NodeGatewayStartedShards.class); this.nodeEnv = env; } @@ -110,23 +109,9 @@ public class TransportNodesListGatewayStartedShards extends } @Override - protected NodesGatewayStartedShards newResponse(Request request, AtomicReferenceArray responses) { - final List nodesList = new ArrayList<>(); - final List failures = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeGatewayStartedShards) { // will also filter out null response for unallocated ones - nodesList.add((NodeGatewayStartedShards) resp); - } else if (resp instanceof FailedNodeException) { - failures.add((FailedNodeException) resp); - } else { - logger.warn("unknown response type [{}], expected NodeLocalGatewayStartedShards or FailedNodeException", - resp); - } - } - return new NodesGatewayStartedShards(clusterName, - nodesList.toArray(new NodeGatewayStartedShards[nodesList.size()]), - failures.toArray(new FailedNodeException[failures.size()])); + protected NodesGatewayStartedShards newResponse(Request request, + List responses, List failures) { + return new NodesGatewayStartedShards(clusterName, responses, failures); } @Override @@ -217,36 +202,19 @@ public class TransportNodesListGatewayStartedShards extends public static class NodesGatewayStartedShards extends BaseNodesResponse { - private FailedNodeException[] failures; - - public NodesGatewayStartedShards(ClusterName clusterName, NodeGatewayStartedShards[] nodes, - FailedNodeException[] failures) { - super(clusterName, nodes); - this.failures = failures; + public NodesGatewayStartedShards(ClusterName clusterName, List nodes, + List failures) { + super(clusterName, nodes, failures); } @Override - public FailedNodeException[] failures() { - return failures; + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(NodeGatewayStartedShards::new); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeGatewayStartedShards[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = new NodeGatewayStartedShards(); - nodes[i].readFrom(in); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeGatewayStartedShards response : nodes) { - response.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws 
IOException { + out.writeStreamableList(nodes); } } @@ -258,7 +226,7 @@ public class TransportNodesListGatewayStartedShards extends public NodeRequest() { } - NodeRequest(String nodeId, Request request) { + public NodeRequest(String nodeId, Request request) { super(nodeId); this.shardId = request.shardId(); } diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index e4a1709db55..92c9bd3b575 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -54,18 +54,20 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListShardStoreMetaData extends TransportNodesAction - implements AsyncShardFetch.List { +public class TransportNodesListShardStoreMetaData extends TransportNodesAction + implements AsyncShardFetch.Lister { public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; @@ -74,11 +76,12 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction nodeStoreFilesMetaDatas = new ArrayList<>(); - final List failures = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeStoreFilesMetaData) { // will also filter out null response for unallocated ones - nodeStoreFilesMetaDatas.add((NodeStoreFilesMetaData) resp); - } else if (resp instanceof FailedNodeException) { - failures.add((FailedNodeException) resp); - } else { - logger.warn("unknown response type [{}], expected NodeStoreFilesMetaData or FailedNodeException", resp); - } - } - return new NodesStoreFilesMetaData(clusterName, nodeStoreFilesMetaDatas.toArray(new NodeStoreFilesMetaData[nodeStoreFilesMetaDatas.size()]), - failures.toArray(new FailedNodeException[failures.size()])); + protected NodesStoreFilesMetaData newResponse(Request request, + List responses, List failures) { + return new NodesStoreFilesMetaData(clusterName, responses, failures); } @Override @@ -293,37 +284,21 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction { - private FailedNodeException[] failures; - NodesStoreFilesMetaData() { } - public NodesStoreFilesMetaData(ClusterName clusterName, NodeStoreFilesMetaData[] nodes, FailedNodeException[] failures) { - super(clusterName, nodes); - this.failures = failures; + public NodesStoreFilesMetaData(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public FailedNodeException[] failures() { - return failures; + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeStoreFilesMetaData::readListShardStoreNodeOperationResponse); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - nodes = new NodeStoreFilesMetaData[in.readVInt()]; - for (int i = 0; i < nodes.length; i++) { - nodes[i] = NodeStoreFilesMetaData.readListShardStoreNodeOperationResponse(in); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - 
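readStreamableList and writeStreamableList (added to StreamInput/StreamOutput earlier in this patch) are deliberately symmetric, which is what makes the one-line readNodesFrom/writeNodesTo overrides above possible. A test-style sketch of the round trip, assuming package-private access to the no-arg constructor:

    // Round-trip sketch for a Streamable list.
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeStreamableList(nodes);               // writes a vint size, then each element
    List<NodeGatewayStartedShards> copy =
            out.bytes().streamInput().readStreamableList(NodeGatewayStartedShards::new);
    assert copy.size() == nodes.size();
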
super.writeTo(out); - out.writeVInt(nodes.length); - for (NodeStoreFilesMetaData response : nodes) { - response.writeTo(out); - } + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java index 53bec14f967..19bee19509a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java @@ -69,7 +69,7 @@ public class RestNodesHotThreadsAction extends BaseRestHandler { @Override public RestResponse buildResponse(NodesHotThreadsResponse response) throws Exception { StringBuilder sb = new StringBuilder(); - for (NodeHotThreads node : response) { + for (NodeHotThreads node : response.getNodes()) { sb.append("::: ").append(node.getNode().toString()).append("\n"); Strings.spaceify(3, node.getHotThreads(), sb); sb.append('\n'); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java index bd6637cb788..65b7715385f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java @@ -20,22 +20,17 @@ package org.elasticsearch.rest.action.admin.cluster.node.info; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.rest.action.support.RestActions.NodesResponseRestListener; import java.util.Set; @@ -106,15 +101,6 @@ public class RestNodesInfoAction extends BaseRestHandler { settingsFilter.addFilterSettingParams(request); - client.admin().cluster().nodesInfo(nodesInfoRequest, new RestBuilderListener(channel) { - - @Override - public RestResponse buildResponse(NodesInfoResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - response.toXContent(builder, request); - builder.endObject(); - return new BytesRestResponse(RestStatus.OK, builder); - } - }); + client.admin().cluster().nodesInfo(nodesInfoRequest, new NodesResponseRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index 
1e2aece1646..47cce5283a9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest.action.admin.cluster.node.stats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.client.Client; @@ -31,7 +30,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestToXContentListener; +import org.elasticsearch.rest.action.support.RestActions.NodesResponseRestListener; import java.util.Set; @@ -114,6 +113,6 @@ public class RestNodesStatsAction extends BaseRestHandler { nodesStatsRequest.indices().includeSegmentFileSizes(true); } - client.admin().cluster().nodesStats(nodesStatsRequest, new RestToXContentListener<>(channel)); + client.admin().cluster().nodesStats(nodesStatsRequest, new NodesResponseRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java index a02dcfccb98..29cc6377494 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java @@ -27,8 +27,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestToXContentListener; - +import org.elasticsearch.rest.action.support.RestActions.NodesResponseRestListener; /** * @@ -46,6 +45,6 @@ public class RestClusterStatsAction extends BaseRestHandler { public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { ClusterStatsRequest clusterStatsRequest = new ClusterStatsRequest().nodesIds(request.paramAsStringArray("nodeId", null)); clusterStatsRequest.timeout(request.param("timeout")); - client.admin().cluster().clusterStats(clusterStatsRequest, new RestToXContentListener<>(channel)); + client.admin().cluster().clusterStats(clusterStatsRequest, new NodesResponseRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 652cb8c61e9..51d5089ec6f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -21,13 +21,17 @@ package org.elasticsearch.rest.action.support; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import 
org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,9 +42,14 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.List; /** * @@ -63,25 +72,21 @@ public class RestActions { return (version == Versions.MATCH_ANY) ? defaultVersion : version; } - static final class Fields { - static final String _SHARDS = "_shards"; - static final String TOTAL = "total"; - static final String SUCCESSFUL = "successful"; - static final String FAILED = "failed"; - static final String FAILURES = "failures"; + public static void buildBroadcastShardsHeader(XContentBuilder builder, Params params, BroadcastResponse response) throws IOException { + buildBroadcastShardsHeader(builder, params, + response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), + response.getShardFailures()); } - public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, BroadcastResponse response) throws IOException { - buildBroadcastShardsHeader(builder, params, response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), response.getShardFailures()); - } - - public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, int total, int successful, int failed, ShardOperationFailedException[] shardFailures) throws IOException { - builder.startObject(Fields._SHARDS); - builder.field(Fields.TOTAL, total); - builder.field(Fields.SUCCESSFUL, successful); - builder.field(Fields.FAILED, failed); + public static void buildBroadcastShardsHeader(XContentBuilder builder, Params params, + int total, int successful, int failed, + ShardOperationFailedException[] shardFailures) throws IOException { + builder.startObject("_shards"); + builder.field("total", total); + builder.field("successful", successful); + builder.field("failed", failed); if (shardFailures != null && shardFailures.length > 0) { - builder.startArray(Fields.FAILURES); + builder.startArray("failures"); final boolean group = params.paramAsBoolean("group_shard_failures", true); // we group by default for (ShardOperationFailedException shardFailure : group ? ExceptionsHelper.groupBy(shardFailures) : shardFailures) { builder.startObject(); @@ -92,6 +97,94 @@ public class RestActions { } builder.endObject(); } + /** + * Create the XContent header for any {@link BaseNodesResponse}. + * + * @param builder XContent builder. + * @param params XContent parameters. + * @param response The response containing individual, node-level responses. 
+ * @see #buildNodesHeader(XContentBuilder, Params, int, int, int, List) + */ + public static void buildNodesHeader(final XContentBuilder builder, final Params params, + final BaseNodesResponse response) + throws IOException { + final int successful = response.getNodes().size(); + final int failed = response.failures().size(); + + buildNodesHeader(builder, params, successful + failed, successful, failed, response.failures()); + } + + /** + * Create the XContent header for any {@link BaseNodesResponse}. This looks like: + * + * "_nodes" : { + * "total" : 3, + * "successful" : 1, + * "failed" : 2, + * "failures" : [ { ... }, { ... } ] + * } + * + * Prefer the overload that properly invokes this method to calling this directly. + * + * @param builder XContent builder. + * @param params XContent parameters. + * @param total The total number of nodes touched. + * @param successful The successful number of responses received. + * @param failed The number of failures (effectively {@code total - successful}). + * @param failures The failure exceptions related to {@code failed}. + * @see #buildNodesHeader(XContentBuilder, Params, BaseNodesResponse) + */ + public static void buildNodesHeader(final XContentBuilder builder, final Params params, + final int total, final int successful, final int failed, + final List failures) throws IOException { + builder.startObject("_nodes"); + builder.field("total", total); + builder.field("successful", successful); + builder.field("failed", failed); + + if (failures.isEmpty() == false) { + builder.startArray("failures"); + for (FailedNodeException failure : failures) { + builder.startObject(); + failure.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + } + + builder.endObject(); + } + + /** + * Automatically transform the {@link ToXContent}-compatible, nodes-level {@code response} into a a {@link BytesRestResponse}. + *
     * <p>
    + * This looks like: + * + * { + * "_nodes" : { ... }, + * "cluster_name" : "...", + * ... + * } + * + * + * @param builder XContent builder. + * @param params XContent parameters. + * @param response The nodes-level (plural) response. + * @return Never {@code null}. + * @throws IOException if building the response causes an issue + */ + public static BytesRestResponse nodesResponse(final XContentBuilder builder, + final Params params, + final NodesResponse response) + throws IOException { + builder.startObject(); + RestActions.buildNodesHeader(builder, params, response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, params); + builder.endObject(); + + return new BytesRestResponse(RestStatus.OK, builder); + } public static QueryBuilder urlParamsToQueryBuilder(RestRequest request) { String queryString = request.param("q"); @@ -130,7 +223,8 @@ public class RestActions { return content; } - public static QueryBuilder getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, ParseFieldMatcher parseFieldMatcher) { + public static QueryBuilder getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, + ParseFieldMatcher parseFieldMatcher) { try (XContentParser requestParser = XContentFactory.xContent(source).createParser(source)) { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, requestParser, parseFieldMatcher); return context.parseTopLevelQueryBuilder(); @@ -158,4 +252,32 @@ public class RestActions { public static boolean hasBodyContent(final RestRequest request) { return request.hasContent() || request.hasParam("source"); } + + /** + * {@code NodesResponseRestBuilderListener} automatically translates any {@link BaseNodesResponse} (multi-node) response that is + * {@link ToXContent}-compatible into a {@link RestResponse} with the necessary header info (e.g., "cluster_name"). + *
     * <p>
    + * This is meant to avoid a slew of anonymous classes doing (or worse): + * + * client.admin().cluster().request(nodesRequest, new RestBuilderListener<NodesResponse>(channel) { + * public RestResponse buildResponse(NodesResponse response, XContentBuilder builder) throws Exception { + * return RestActions.nodesResponse(builder, ToXContent.EMPTY_PARAMS, response); + * } + * }); + * + */ + public static class NodesResponseRestListener + extends RestBuilderListener { + + public NodesResponseRestListener(RestChannel channel) { + super(channel); + } + + @Override + public RestResponse buildResponse(NodesResponse response, XContentBuilder builder) throws Exception { + return RestActions.nodesResponse(builder, ToXContent.EMPTY_PARAMS, response); + } + + } + } diff --git a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java index 47823307ffd..480c103df81 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -86,7 +86,7 @@ public class HotThreadsIT extends ESIntegTestCase { assertThat(nodeHotThreads, notNullValue()); Map nodesMap = nodeHotThreads.getNodesMap(); assertThat(nodesMap.size(), equalTo(cluster().size())); - for (NodeHotThreads ht : nodeHotThreads) { + for (NodeHotThreads ht : nodeHotThreads.getNodes()) { assertNotNull(ht.getHotThreads()); //logger.info(ht.getHotThreads()); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 8168d1a8819..628e26bc4cf 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.HashMap; import java.util.List; @@ -37,7 +36,6 @@ import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** * Tests for the cluster allocation explanation @@ -54,7 +52,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { @Override public void run() { NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get(); - assertThat(resp.getNodes().length, equalTo(3)); + assertThat(resp.getNodes().size(), equalTo(3)); } }); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index d96531b2f6c..9af2bb07417 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -255,12 +255,12 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Make sure that the request was successful assertNull(throwableReference.get()); 
assertNotNull(responseReference.get()); - assertEquals(nodesCount, responseReference.get().getNodes().length); + assertEquals(nodesCount, responseReference.get().getNodes().size()); assertEquals(0, responseReference.get().failureCount()); } else { // We canceled the request, in this case it should have fail, but we should get partial response assertNull(throwableReference.get()); - assertEquals(nodesCount, responseReference.get().failureCount() + responseReference.get().getNodes().length); + assertEquals(nodesCount, responseReference.get().failureCount() + responseReference.get().getNodes().size()); // and we should have at least as many failures as the number of blocked operations // (we might have cancelled some non-blocked operations before they even started and that's ok) assertThat(responseReference.get().failureCount(), greaterThanOrEqualTo(blockedNodesCount)); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 9c0e2bfcafd..ffc19aa0dcd 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -50,11 +50,9 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; -import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; import static java.util.Collections.emptyMap; @@ -88,7 +86,6 @@ public abstract class TaskManagerTestCase extends ESTestCase { testNodes = new TestNode[nodesCount]; for (int i = 0; i < testNodes.length; i++) { testNodes[i] = new TestNode("node" + i, threadPool, settings); - ; } } @@ -113,27 +110,22 @@ public abstract class TaskManagerTestCase extends ESTestCase { static class NodesResponse extends BaseNodesResponse { - private int failureCount; - - protected NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) { - super(clusterName, nodes); - this.failureCount = failureCount; + protected NodesResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - failureCount = in.readVInt(); + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(NodeResponse::new); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(failureCount); + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } public int failureCount() { - return failureCount; + return failures().size(); } } @@ -148,24 +140,12 @@ public abstract class TaskManagerTestCase extends ESTestCase { Supplier nodeRequest) { super(settings, actionName, clusterName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - request, nodeRequest, ThreadPool.Names.GENERIC); + request, nodeRequest, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override - protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) { - final List nodesList = new ArrayList<>(); - int failureCount = 0; - for (int i = 0; i < responses.length(); 
i++) { - Object resp = responses.get(i); - if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones - nodesList.add((NodeResponse) resp); - } else if (resp instanceof FailedNodeException) { - failureCount++; - } else { - logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp); - } - } - return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount); + protected NodesResponse newResponse(NodesRequest request, List responses, List failures) { + return new NodesResponse(clusterName, responses, failures); } @Override diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 7f1d6c8b835..ec05ff9c984 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -56,7 +56,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; import static org.elasticsearch.test.ESTestCase.awaitBusy; @@ -111,31 +110,26 @@ public class TestTaskPlugin extends Plugin { public static class NodesResponse extends BaseNodesResponse { - private int failureCount; - NodesResponse() { } - public NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) { - super(clusterName, nodes); - this.failureCount = failureCount; + public NodesResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - failureCount = in.readVInt(); + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(NodeResponse::new); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(failureCount); + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } public int failureCount() { - return failureCount; + return failures().size(); } } @@ -219,25 +213,13 @@ public class TestTaskPlugin extends Plugin { public TransportTestTaskAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) { super(settings, TestTaskAction.NAME, clusterName, threadPool, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC); + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override - protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) { - final List nodesList = new ArrayList<>(); - int failureCount = 0; - for (int i = 0; i < responses.length(); i++) { - Object resp = responses.get(i); - if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones - nodesList.add((NodeResponse) resp); - } else if (resp instanceof FailedNodeException) { - failureCount++; - } else { - logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or 
FailedNodeException", resp); - } - } - return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount); + protected NodesResponse newResponse(NodesRequest request, List responses, List failures) { + return new NodesResponse(clusterName, responses, failures); } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 6a7f7ac3398..a3335d87fd6 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support.nodes; import org.elasticsearch.Version; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests; @@ -28,6 +29,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.test.ESTestCase; @@ -39,6 +42,7 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -53,6 +57,7 @@ import java.util.function.Supplier; import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.mockito.Mockito.mock; public class TransportNodesActionTests extends ESTestCase { @@ -92,6 +97,42 @@ public class TransportNodesActionTests extends ESTestCase { assertEquals(clusterService.state().nodes().resolveNodesIds(finalNodesIds).length, capturedRequests.size()); } + public void testNewResponseNullArray() { + expectThrows(NullPointerException.class, () -> action.newResponse(new TestNodesRequest(), null)); + } + + public void testNewResponse() { + TestNodesRequest request = new TestNodesRequest(); + List expectedNodeResponses = mockList(TestNodeResponse.class, randomIntBetween(0, 2)); + expectedNodeResponses.add(new TestNodeResponse()); + List nodeResponses = new ArrayList<>(expectedNodeResponses); + // This should be ignored: + nodeResponses.add(new OtherNodeResponse()); + List failures = mockList(FailedNodeException.class, randomIntBetween(0, 2)); + + List allResponses = new ArrayList<>(expectedNodeResponses); + allResponses.addAll(failures); + + Collections.shuffle(allResponses, random()); + + AtomicReferenceArray atomicArray = new AtomicReferenceArray<>(allResponses.toArray()); + + TestNodesResponse response = action.newResponse(request, atomicArray); + + assertSame(request, response.request); + // note: I shuffled the overall list, so it's not possible to guarantee that it's in the right order + assertTrue(expectedNodeResponses.containsAll(response.getNodes())); + assertTrue(failures.containsAll(response.failures())); + } + + private List mockList(Class clazz, int size) 
{ + List failures = new ArrayList<>(size); + for (int i = 0; i < size; ++i) { + failures.add(mock(clazz)); + } + return failures; + } + private enum NodeSelector { LOCAL("_local"), ELECTED_MASTER("_master"), MASTER_ELIGIBLE("master:true"), DATA("data:true"), CUSTOM_ATTRIBUTE("attr:value"); @@ -165,26 +206,20 @@ public class TransportNodesActionTests extends ESTestCase { return new DiscoveryNode(node, node, DummyTransportAddress.INSTANCE, attributes, roles, Version.CURRENT); } - private static class TestTransportNodesAction extends TransportNodesAction { + private static class TestTransportNodesAction + extends TransportNodesAction { TestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor) { super(settings, "indices:admin/test", CLUSTER_NAME, threadPool, clusterService, transportService, actionFilters, - null, request, nodeRequest, nodeExecutor); + null, request, nodeRequest, nodeExecutor, TestNodeResponse.class); } @Override - protected TestNodesResponse newResponse(TestNodesRequest request, AtomicReferenceArray nodesResponses) { - final List nodeResponses = new ArrayList<>(); - for (int i = 0; i < nodesResponses.length(); i++) { - Object resp = nodesResponses.get(i); - if (resp instanceof TestNodeResponse) { - nodeResponses.add((TestNodeResponse) resp); - } - } - return new TestNodesResponse(nodeResponses); + protected TestNodesResponse newResponse(TestNodesRequest request, + List responses, List failures) { + return new TestNodesResponse(request, responses, failures); } @Override @@ -216,16 +251,28 @@ public class TransportNodesActionTests extends ESTestCase { private static class TestNodesResponse extends BaseNodesResponse { - private final List nodeResponses; + private final TestNodesRequest request; - TestNodesResponse(List nodeResponses) { - this.nodeResponses = nodeResponses; + TestNodesResponse(TestNodesRequest request, List nodeResponses, List failures) { + super(CLUSTER_NAME, nodeResponses, failures); + this.request = request; + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readStreamableList(TestNodeResponse::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); } } - private static class TestNodeRequest extends BaseNodeRequest { - } + private static class TestNodeRequest extends BaseNodeRequest { } + + private static class TestNodeResponse extends BaseNodeResponse { } + + private static class OtherNodeResponse extends BaseNodeResponse { } - private static class TestNodeResponse extends BaseNodeResponse { - } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java index 81d894e3349..a4427befea2 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java @@ -44,7 +44,7 @@ public class ClusterStateBackwardsCompatIT extends ESBackcompatTestCase { createIndex("test"); // connect to each node with a custom TransportClient, issue a ClusterStateRequest to test serialization - for (NodeInfo n : clusterNodes()) { + for (NodeInfo n : clusterNodes().getNodes()) { try (TransportClient tc = newTransportClient()) { 
tc.addTransportAddress(n.getNode().getAddress()); ClusterStateResponse response = tc.admin().cluster().prepareState().execute().actionGet(); @@ -68,7 +68,7 @@ public class ClusterStateBackwardsCompatIT extends ESBackcompatTestCase { try { enableIndexBlock("test-blocks", block.getKey()); - for (NodeInfo n : clusterNodes()) { + for (NodeInfo n : clusterNodes().getNodes()) { try (TransportClient tc = newTransportClient()) { tc.addTransportAddress(n.getNode().getAddress()); diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index 1826ced24ea..31c9c21b8e7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -41,6 +41,8 @@ import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.test.ESTestCase; import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -196,14 +198,14 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/least", "/dev/sda", 100, 90, 70), new FsInfo.Path("/most", "/dev/sda", 100, 90, 80), }; - NodeStats[] nodeStats = new NodeStats[] { + List nodeStats = Arrays.asList( new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null, null) - }; + ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1"); DiskUsage mostNode_1 = newMostAvaiableUsages.get("node_1"); @@ -237,14 +239,14 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/most", "/dev/sda", 100, 90, 70), new FsInfo.Path("/least", "/dev/sda", 10, -8, 0), }; - NodeStats[] nodeStats = new NodeStats[] { + List nodeStats = Arrays.asList( new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null, null), new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null, null) - }; + ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1"); DiskUsage mostNode_1 = newMostAvailableUsages.get("node_1"); diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 8e2dea63d6a..8d13f04240e 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -81,7 +81,7 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { NodesInfoResponse nodesInfo = client().admin().cluster().prepareNodesInfo().get(); String excludedNodeId = null; - for (NodeInfo nodeInfo : nodesInfo) { + for (NodeInfo nodeInfo : nodesInfo.getNodes()) { if (nodeInfo.getNode().isDataNode()) { excludedNodeId = nodeInfo.getNode().getId(); break; @@ -124,7 +124,7 @@ public class AckClusterUpdateSettingsIT extends ESIntegTestCase { NodesInfoResponse nodesInfo = client().admin().cluster().prepareNodesInfo().get(); String excludedNodeId = null; - for (NodeInfo nodeInfo : nodesInfo) { + for (NodeInfo nodeInfo : nodesInfo.getNodes()) { if (nodeInfo.getNode().isDataNode()) { excludedNodeId = nodeInfo.getNode().getId(); break; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 81a28adee2d..ef54c6fd637 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -59,7 +59,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase { @Override public void run() { NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get(); - assertThat(resp.getNodes().length, equalTo(3)); + assertThat(resp.getNodes().size(), equalTo(3)); } }); diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 381120fdc0f..9fcbb708156 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -22,20 +22,20 @@ package org.elasticsearch.common.io.stream; import org.apache.lucene.util.Constants; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; @@ -431,6 +431,33 @@ public class BytesStreamsTests extends ESTestCase { endsWith(" claims to have a different name [intentionally-broken] than it was read from [test-named-writeable].")); } + public void testWriteStreamableList() throws IOException { + final int size = randomIntBetween(0, 5); + final List expected = new ArrayList<>(size); + + for (int i = 0; i < size; ++i) { + expected.add(new TestStreamable(randomBoolean())); + } + + final BytesStreamOutput out = new BytesStreamOutput(); + out.writeStreamableList(expected); + + final StreamInput in = StreamInput.wrap(out.bytes().toBytes()); + + List 
loaded = in.readStreamableList(TestStreamable::new); + + assertThat(loaded, hasSize(expected.size())); + + for (int i = 0; i < expected.size(); ++i) { + assertEquals(expected.get(i).value, loaded.get(i).value); + } + + assertEquals(0, in.available()); + + in.close(); + out.close(); + } + private static abstract class BaseNamedWriteable implements NamedWriteable { } @@ -544,4 +571,25 @@ public class BytesStreamsTests extends ESTestCase { assertEquals(point, geoPoint); } } + + private static class TestStreamable implements Streamable { + + private boolean value; + + public TestStreamable() { } + + public TestStreamable(boolean value) { + this.value = value; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + value = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(value); + } + } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 2de9d669511..d12a0ff55a3 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -349,9 +349,9 @@ public class ZenDiscoveryIT extends ESIntegTestCase { logger.info("--> request node discovery stats"); NodesStatsResponse statsResponse = client().admin().cluster().prepareNodesStats().clear().setDiscovery(true).get(); - assertThat(statsResponse.getNodes().length, equalTo(1)); + assertThat(statsResponse.getNodes().size(), equalTo(1)); - DiscoveryStats stats = statsResponse.getNodes()[0].getDiscoveryStats(); + DiscoveryStats stats = statsResponse.getNodes().get(0).getDiscoveryStats(); assertThat(stats.getQueueStats(), notNullValue()); assertThat(stats.getQueueStats().getTotal(), equalTo(0)); assertThat(stats.getQueueStats().getCommitted(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 3ab15baf2e0..b710aa50ee0 100644 --- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -284,10 +284,11 @@ public class AsyncShardFetchTests extends ESTestCase { assert entry != null; entry.executeLatch.await(); if (entry.failure != null) { - processAsyncFetch(shardId, null, new FailedNodeException[]{new FailedNodeException(nodeId, - "unexpected", entry.failure)}); + processAsyncFetch(shardId, null, Collections.singletonList(new FailedNodeException(nodeId, + "unexpected", + entry.failure))); } else { - processAsyncFetch(shardId, new Response[]{entry.response}, null); + processAsyncFetch(shardId, Collections.singletonList(entry.response), null); } } catch (Throwable e) { logger.error("unexpected failure", e); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 702e83e7d55..aaa29ad1970 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -63,9 +63,9 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -568,12 +568,12 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new String[]{node.getId()})) .get(); - assertThat(response.getNodes(), arrayWithSize(1)); - assertThat(response.getNodes()[0].allocationId(), notNullValue()); + assertThat(response.getNodes(), hasSize(1)); + assertThat(response.getNodes().get(0).allocationId(), notNullValue()); if (corrupt) { - assertThat(response.getNodes()[0].storeException(), notNullValue()); + assertThat(response.getNodes().get(0).storeException(), notNullValue()); } else { - assertThat(response.getNodes()[0].storeException(), nullValue()); + assertThat(response.getNodes().get(0).storeException(), nullValue()); } // start another node so cluster consistency checks won't time out due to the lack of state diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 367ecf76768..df8736b44ef 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -578,7 +578,7 @@ public class CorruptedFileIT extends ESIntegTestCase { } assertTrue(shardRouting.assignedToNode()); NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(shardRouting.currentNodeId()).setFs(true).get(); - NodeStats nodeStats = nodeStatses.getNodes()[0]; + NodeStats nodeStats = nodeStatses.getNodes().get(0); List files = new ArrayList<>(); filesToNodes.put(nodeStats.getNode().getName(), files); for (FsInfo.Path info : nodeStats.getFs()) { @@ -615,7 +615,7 @@ public class CorruptedFileIT extends ESIntegTestCase { String nodeId = shardRouting.currentNodeId(); NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get(); Set files = new TreeSet<>(); // treeset makes sure iteration order is deterministic - for (FsInfo.Path info : nodeStatses.getNodes()[0].getFs()) { + for (FsInfo.Path info : nodeStatses.getNodes().get(0).getFs()) { String path = info.getPath(); Path file = PathUtils.get(path).resolve("indices").resolve(test.getUUID()).resolve(Integer.toString(shardRouting.getId())).resolve("index"); if (Files.exists(file)) { // multi data path might only have one path in use @@ -678,9 +678,9 @@ public class CorruptedFileIT extends ESIntegTestCase { NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(routing.currentNodeId()).setFs(true).get(); ClusterState state = client().admin().cluster().prepareState().get().getState(); final Index test = state.metaData().index("test").getIndex(); - assertThat(routing.toString(), nodeStatses.getNodes().length, equalTo(1)); + assertThat(routing.toString(), nodeStatses.getNodes().size(), equalTo(1)); List files = new ArrayList<>(); - for (FsInfo.Path info : nodeStatses.getNodes()[0].getFs()) { + for (FsInfo.Path info : nodeStatses.getNodes().get(0).getFs()) { String path = info.getPath(); Path file = PathUtils.get(path).resolve("indices/" + test.getUUID() + "/" + Integer.toString(routing.getId()) + 
"/index"); if (Files.exists(file)) { // multi data path might only have one path in use diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 94ee490c729..018bce16566 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -121,7 +121,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase { String nodeId = shardRouting.currentNodeId(); NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get(); Set files = new TreeSet<>(); // treeset makes sure iteration order is deterministic - for (FsInfo.Path fsPath : nodeStatses.getNodes()[0].getFs()) { + for (FsInfo.Path fsPath : nodeStatses.getNodes().get(0).getFs()) { String path = fsPath.getPath(); final String relativeDataLocationPath = "indices/"+ test.getUUID() +"/" + Integer.toString(shardRouting.getId()) + "/translog"; Path file = PathUtils.get(path).resolve(relativeDataLocationPath); diff --git a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java index 0d850632393..cb0397990ab 100644 --- a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java +++ b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java @@ -119,10 +119,9 @@ public class SuggestStatsIT extends ESIntegTestCase { assertThat(suggest.getSuggestTimeInMillis(), lessThanOrEqualTo(totalShards * (endTime - startTime))); NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().execute().actionGet(); - NodeStats[] nodes = nodeStats.getNodes(); Set nodeIdsWithIndex = nodeIdsWithIndex("test1", "test2"); int num = 0; - for (NodeStats stat : nodes) { + for (NodeStats stat : nodeStats.getNodes()) { SearchStats.Stats suggestStats = stat.getIndices().getSearch().getTotal(); logger.info("evaluating {}", stat.getNode()); if (nodeIdsWithIndex.contains(stat.getNode().getId())) { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 2f039f6c472..351742d9712 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -101,7 +101,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { /** Returns true if any of the nodes used a noop breaker */ private boolean noopBreakerUsed() { NodesStatsResponse stats = client().admin().cluster().prepareNodesStats().setBreaker(true).get(); - for (NodeStats nodeStats : stats) { + for (NodeStats nodeStats : stats.getNodes()) { if (nodeStats.getBreaker().getStats(CircuitBreaker.REQUEST).getLimit() == NoopCircuitBreaker.LIMIT) { return true; } @@ -230,7 +230,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // We need the request limit beforehand, just from a single node because the limit should always be the same long beforeReqLimit = client.admin().cluster().prepareNodesStats().setBreaker(true).get() - .getNodes()[0].getBreaker().getStats(CircuitBreaker.REQUEST).getLimit(); + .getNodes().get(0).getBreaker().getStats(CircuitBreaker.REQUEST).getLimit(); Settings resetSettings = Settings.builder() 
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "10b") diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 17b1b8e7d70..77cffc20ae1 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -69,10 +69,10 @@ import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -315,7 +315,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { @Override public void run() { NodesStatsResponse statsResponse = client().admin().cluster().prepareNodesStats().clear().setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Recovery)).get(); - assertThat(statsResponse.getNodes(), arrayWithSize(2)); + assertThat(statsResponse.getNodes(), hasSize(2)); for (NodeStats nodeStats : statsResponse.getNodes()) { final RecoveryStats recoveryStats = nodeStats.getIndices().getRecoveryStats(); if (nodeStats.getNode().getName().equals(nodeA)) { @@ -344,7 +344,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { validateIndexRecoveryState(recoveryStates.get(0).getIndex()); statsResponse = client().admin().cluster().prepareNodesStats().clear().setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Recovery)).get(); - assertThat(statsResponse.getNodes(), arrayWithSize(2)); + assertThat(statsResponse.getNodes(), hasSize(2)); for (NodeStats nodeStats : statsResponse.getNodes()) { final RecoveryStats recoveryStats = nodeStats.getIndices().getRecoveryStats(); assertThat(recoveryStats.currentAsSource(), equalTo(0)); @@ -363,7 +363,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { ensureGreen(); statsResponse = client().admin().cluster().prepareNodesStats().clear().setIndices(new CommonStatsFlags(CommonStatsFlags.Flag.Recovery)).get(); - assertThat(statsResponse.getNodes(), arrayWithSize(2)); + assertThat(statsResponse.getNodes(), hasSize(2)); for (NodeStats nodeStats : statsResponse.getNodes()) { final RecoveryStats recoveryStats = nodeStats.getIndices().getRecoveryStats(); assertThat(recoveryStats.currentAsSource(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index b1513908874..4716e7dba78 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -104,7 +104,7 @@ public class IndexStatsIT extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); + 
assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); IndicesStatsResponse indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet(); assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); @@ -113,7 +113,7 @@ public class IndexStatsIT extends ESIntegTestCase { client().prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet(); nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet(); assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); @@ -123,9 +123,9 @@ public class IndexStatsIT extends ESIntegTestCase { // now check the per field stats nodesStats = client().admin().cluster().prepareNodesStats().setIndices(new CommonStatsFlags().set(CommonStatsFlags.Flag.FieldData, true).fieldDataFields("*")).execute().actionGet(); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes()[1].getIndices().getFieldData().getFields().get("field"), greaterThan(0L)); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes()[1].getIndices().getFieldData().getFields().get("field"), lessThan(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes())); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes().get(1).getIndices().getFieldData().getFields().get("field"), greaterThan(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes().get(1).getIndices().getFieldData().getFields().get("field"), lessThan(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes())); indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).setFieldDataFields("*").execute().actionGet(); assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); @@ -134,7 +134,7 @@ public class IndexStatsIT extends ESIntegTestCase { client().admin().indices().prepareClearCache().setFieldDataCache(true).execute().actionGet(); nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); - 
assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet(); assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); @@ -152,8 +152,8 @@ public class IndexStatsIT extends ESIntegTestCase { NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true) .execute().actionGet(); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); - assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); IndicesStatsResponse indicesStats = client().admin().indices().prepareStats("test") .clear().setFieldData(true).setQueryCache(true) @@ -173,8 +173,8 @@ public class IndexStatsIT extends ESIntegTestCase { nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true) .execute().actionGet(); - assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); - assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), greaterThan(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); + assertThat(nodesStats.getNodes().get(0).getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getQueryCache().getMemorySizeInBytes(), greaterThan(0L)); indicesStats = client().admin().indices().prepareStats("test") .clear().setFieldData(true).setQueryCache(true) @@ -186,8 +186,8 @@ public class IndexStatsIT extends ESIntegTestCase { Thread.sleep(100); // Make sure the filter cache entries have been removed... 
         nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true)
                 .execute().actionGet();
-        assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
-        assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L));
+        assertThat(nodesStats.getNodes().get(0).getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
+        assertThat(nodesStats.getNodes().get(0).getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes().get(1).getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L));

         indicesStats = client().admin().indices().prepareStats("test")
                 .clear().setFieldData(true).setQueryCache(true)
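The hunks in this patch all follow one pattern: getNodes() on the nodes-level responses now returns a java.util.List instead of an array, so call sites trade .length for .size() and nodes[i] for nodes.get(i), or iterate the list directly. A minimal sketch of the call-site migration, using only accessors that appear in the hunks above; the class and method names are illustrative, not part of the change:

    import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
    import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;

    class NodesListMigrationSketch {
        // Sums a per-node metric without indexing into an array.
        static long totalFieldDataBytes(NodesStatsResponse nodesStats) {
            long total = 0;
            // Before: NodeStats[] nodes = nodesStats.getNodes(); total += nodes[0]...
            // After: getNodes() is a List, so iterate it directly.
            for (NodeStats stat : nodesStats.getNodes()) {
                total += stat.getIndices().getFieldData().getMemorySizeInBytes();
            }
            return total;
        }
    }

Summing over the whole list also scales past the hard-coded two-node assumption that the [0] + [1] assertions above encode.
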
diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
index c38aeb967ef..b7dcf2872e2 100644
--- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
+++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
@@ -54,29 +54,29 @@ public class SimpleNodesInfoIT extends ESIntegTestCase {
         logger.info("--> started nodes: {} and {}", server1NodeId, server2NodeId);

         NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().execute().actionGet();
-        assertThat(response.getNodes().length, is(2));
+        assertThat(response.getNodes().size(), is(2));
         assertThat(response.getNodesMap().get(server1NodeId), notNullValue());
         assertThat(response.getNodesMap().get(server2NodeId), notNullValue());

         response = client().admin().cluster().nodesInfo(nodesInfoRequest()).actionGet();
-        assertThat(response.getNodes().length, is(2));
+        assertThat(response.getNodes().size(), is(2));
         assertThat(response.getNodesMap().get(server1NodeId), notNullValue());
         assertThat(response.getNodesMap().get(server2NodeId), notNullValue());

         response = client().admin().cluster().nodesInfo(nodesInfoRequest(server1NodeId)).actionGet();
-        assertThat(response.getNodes().length, is(1));
+        assertThat(response.getNodes().size(), is(1));
         assertThat(response.getNodesMap().get(server1NodeId), notNullValue());

         response = client().admin().cluster().nodesInfo(nodesInfoRequest(server1NodeId)).actionGet();
-        assertThat(response.getNodes().length, is(1));
+        assertThat(response.getNodes().size(), is(1));
         assertThat(response.getNodesMap().get(server1NodeId), notNullValue());

         response = client().admin().cluster().nodesInfo(nodesInfoRequest(server2NodeId)).actionGet();
-        assertThat(response.getNodes().length, is(1));
+        assertThat(response.getNodes().size(), is(1));
         assertThat(response.getNodesMap().get(server2NodeId), notNullValue());

         response = client().admin().cluster().nodesInfo(nodesInfoRequest(server2NodeId)).actionGet();
-        assertThat(response.getNodes().length, is(1));
+        assertThat(response.getNodes().size(), is(1));
         assertThat(response.getNodesMap().get(server2NodeId), notNullValue());
     }

@@ -99,7 +99,7 @@ public class SimpleNodesInfoIT extends ESIntegTestCase {

         NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().execute().actionGet();
-        assertThat(response.getNodes().length, is(2));
+        assertThat(response.getNodes().size(), is(2));
         assertThat(response.getNodesMap().get(server1NodeId), notNullValue());
         assertThat(response.getNodesMap().get(server2NodeId), notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
index 95a931d1d54..baf017bc7b9 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
@@ -168,8 +168,8 @@ public class SimpleThreadPoolIT extends ESIntegTestCase {

         // Check that node info is correct
         NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().all().execute().actionGet();
-        for (int i = 0; i < 2; i++) {
-            NodeInfo nodeInfo = nodesInfoResponse.getNodes()[i];
+        assertEquals(2, nodesInfoResponse.getNodes().size());
+        for (NodeInfo nodeInfo : nodesInfoResponse.getNodes()) {
             boolean found = false;
             for (ThreadPool.Info info : nodeInfo.getThreadPool()) {
                 if (info.getName().equals(Names.SEARCH)) {
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
index 0fceda31664..75faa8c49b4 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
@@ -67,7 +67,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase {

         logger.info("--> checking if boundAddress matching publishAddress has same port");
         NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get();
-        for (NodeInfo nodeInfo : nodesInfoResponse) {
+        for (NodeInfo nodeInfo : nodesInfoResponse.getNodes()) {
             BoundTransportAddress boundTransportAddress = nodeInfo.getTransport().getAddress();
             if (nodeInfo.getNode().getName().equals(ipv4OnlyNode)) {
                 assertThat(boundTransportAddress.boundAddresses().length, equalTo(1));
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java
index 95a2691d1c4..52b2f5af797 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java
@@ -130,10 +130,10 @@ public class SearchStatsTests extends ESIntegTestCase {
         assertThat(indicesStats.getTotal().getSearch().getGroupStats().get("group1").getFetchCount(), greaterThan(0L));
         assertThat(indicesStats.getTotal().getSearch().getGroupStats().get("group1").getFetchTimeInMillis(), greaterThan(0L));
         NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().execute().actionGet();
-        NodeStats[] nodes = nodeStats.getNodes();
+
         Set<String> nodeIdsWithIndex = nodeIdsWithIndex("test1", "test2");
         int num = 0;
-        for (NodeStats stat : nodes) {
+        for (NodeStats stat : nodeStats.getNodes()) {
             Stats total = stat.getIndices().getSearch().getTotal();
             if (nodeIdsWithIndex.contains(stat.getNode().getId())) {
                 assertThat(total.getQueryCount(), greaterThan(0L));
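The SearchStatsTests hunk above also drops the intermediate NodeStats[] local entirely: with a List return value the response can be iterated in place while still filtering on node ids, as the test does with nodeIdsWithIndex. A rough standalone equivalent of the resulting loop (a sketch only; the class name is made up, and nodeIdsWithIndex is assumed to be computed elsewhere, as in the test):

    import java.util.Set;

    import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
    import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;

    class SearchStatsLoopSketch {
        // Counts the nodes that host the test indices, reading stats
        // straight from the response's List without an array copy.
        static int countNodesWithIndex(NodesStatsResponse nodeStats, Set<String> nodeIdsWithIndex) {
            int num = 0;
            for (NodeStats stat : nodeStats.getNodes()) {
                if (nodeIdsWithIndex.contains(stat.getNode().getId())) {
                    num++;
                }
            }
            return num;
        }
    }
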
diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java
index 3f78b1f6683..0c57ec3f16e 100644
--- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java
+++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java
@@ -65,6 +65,6 @@ public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCas
         NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().execute().actionGet();
         assertNotNull(nodeInfos);
         assertNotNull(nodeInfos.getNodes());
-        assertEquals(expected, nodeInfos.getNodes().length);
+        assertEquals(expected, nodeInfos.getNodes().size());
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
index d66b1f9fc9e..ef8857475b3 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java
@@ -38,6 +38,8 @@ import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.threadpool.ThreadPool;

+import java.util.Arrays;
+import java.util.Collections;
 import java.util.concurrent.CountDownLatch;

 import static java.util.Collections.emptyMap;
@@ -107,7 +109,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {

     @Override
     public CountDownLatch updateNodeStats(final ActionListener<NodesStatsResponse> listener) {
-        NodesStatsResponse response = new NodesStatsResponse(clusterName, stats);
+        NodesStatsResponse response = new NodesStatsResponse(clusterName, Arrays.asList(stats), Collections.emptyList());
         listener.onResponse(response);
         return new CountDownLatch(0);
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index a31ef76272e..247a340d8c4 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -2046,8 +2046,8 @@ public abstract class ESIntegTestCase extends ESTestCase {

     protected HttpRequestBuilder httpClient(CloseableHttpClient httpClient) {
         final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get();
-        final NodeInfo[] nodes = nodeInfos.getNodes();
-        assertTrue(nodes.length > 0);
+        final List<NodeInfo> nodes = nodeInfos.getNodes();
+        assertFalse(nodeInfos.hasFailures());
         TransportAddress publishAddress = randomFrom(nodes).getHttp().address().publishAddress();
         assertEquals(1, publishAddress.uniqueAddressTypeId());
         InetSocketAddress address = ((InetSocketTransportAddress) publishAddress).address();
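Note the ESIntegTestCase hunk just above: the old nodes.length > 0 guard becomes assertFalse(nodeInfos.hasFailures()). The new NodesStatsResponse constructor in the MockInternalClusterInfoService hunk suggests why: the List-based responses carry a separate failures list alongside the successful nodes, so a caller can check for failures explicitly instead of inferring them from a short array. A hedged sketch of that guard outside a test, assuming only the hasFailures() accessor used above (the class and method names are illustrative):

    import java.util.List;

    import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
    import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;

    class NodesResponseGuardSketch {
        // Fails fast when any node-level request failed, instead of
        // silently proceeding with a partial node list.
        static List<NodeInfo> nodesOrFail(NodesInfoResponse nodeInfos) {
            if (nodeInfos.hasFailures()) {
                throw new IllegalStateException("nodes info request had failures");
            }
            return nodeInfos.getNodes();
        }
    }
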
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
index 296f34637dd..4625aa77e25 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
@@ -154,8 +154,7 @@ final class ExternalNode implements Closeable {
     static boolean waitForNode(final Client client, final String name) throws InterruptedException {
         return ESTestCase.awaitBusy(() -> {
             final NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().get();
-            final NodeInfo[] nodes = nodeInfos.getNodes();
-            for (NodeInfo info : nodes) {
+            for (NodeInfo info : nodeInfos.getNodes()) {
                 if (name.equals(info.getNode().getName())) {
                     return true;
                 }
@@ -166,8 +165,7 @@

     static NodeInfo nodeInfo(final Client client, final String nodeName) {
         final NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().get();
-        final NodeInfo[] nodes = nodeInfos.getNodes();
-        for (NodeInfo info : nodes) {
+        for (NodeInfo info : nodeInfos.getNodes()) {
             if (nodeName.equals(info.getNode().getName())) {
                 return info;
             }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
index 17c3e3d0805..5372c319dae 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
@@ -87,12 +87,12 @@ public final class ExternalTestCluster extends TestCluster {
         try {
             client.addTransportAddresses(transportAddresses);
             NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().clear().setSettings(true).setHttp(true).get();
-            httpAddresses = new InetSocketAddress[nodeInfos.getNodes().length];
+            httpAddresses = new InetSocketAddress[nodeInfos.getNodes().size()];
             this.clusterName = nodeInfos.getClusterName().value();
             int dataNodes = 0;
             int masterAndDataNodes = 0;
-            for (int i = 0; i < nodeInfos.getNodes().length; i++) {
-                NodeInfo nodeInfo = nodeInfos.getNodes()[i];
+            for (int i = 0; i < nodeInfos.getNodes().size(); i++) {
+                NodeInfo nodeInfo = nodeInfos.getNodes().get(i);
                 httpAddresses[i] = ((InetSocketTransportAddress) nodeInfo.getHttp().address().publishAddress()).address();
                 if (DiscoveryNode.isDataNode(nodeInfo.getSettings())) {
                     dataNodes++;

From 2c3d3d91cb1aa99656a345ba6481ef1d8a644ace Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Fri, 6 May 2016 13:09:48 -0700
Subject: [PATCH 0118/1311] Remove unnecessary nebula source or javadoc plugins

---
 core/build.gradle | 2 --
 1 file changed, 2 deletions(-)

diff --git a/core/build.gradle b/core/build.gradle
index e12a80fcbf7..a549f3260fa 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -26,8 +26,6 @@ apply plugin: 'com.bmuschko.nexus'
 apply plugin: 'nebula.optional-base'
 apply plugin: 'nebula.maven-base-publish'
 apply plugin: 'nebula.maven-scm'
-//apply plugin: 'nebula.source-jar'
-//apply plugin: 'nebula.javadoc-jar'

 publishing {
   publications {

From d0d2d2be8c2cbd94949273b4bc848aa0430c10a4 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 6 May 2016 16:14:43 -0400
Subject: [PATCH 0119/1311] Fix exception message in lifecycle

This commit fixes the exception messages for lifecycles when stopping
in illegal states.

Relates #18189
---
 .../elasticsearch/common/component/Lifecycle.java | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/common/component/Lifecycle.java b/core/src/main/java/org/elasticsearch/common/component/Lifecycle.java
index 479496dd9b6..4f0ef4c6887 100644
--- a/core/src/main/java/org/elasticsearch/common/component/Lifecycle.java
+++ b/core/src/main/java/org/elasticsearch/common/component/Lifecycle.java
@@ -19,7 +19,6 @@

 package org.elasticsearch.common.component;

-
 /**
  * Lifecycle state. Allows the following transitions:
  * <ul>