diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties
index ac8682d9769..2d2ec788fa7 100644
--- a/.ci/java-versions.properties
+++ b/.ci/java-versions.properties
@@ -7,4 +7,4 @@
 ES_BUILD_JAVA=openjdk12
 ES_RUNTIME_JAVA=java8
 GRADLE_TASK=build
-GRADLE_EXTRA_ARGS=--no-parallel
+GRADLE_EXTRA_ARGS=
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 25d9732ecb5..4377403065a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -129,7 +129,6 @@ class BuildPlugin implements Plugin<Project> {
 
         project.getTasks().register("buildResources", ExportElasticsearchBuildResourcesTask)
 
-        setupSeed(project)
         configureRepositories(project)
         project.extensions.getByType(ExtraPropertiesExtension).set('versions', VersionProperties.versions)
         configureInputNormalization(project)
@@ -963,32 +962,6 @@ class BuildPlugin implements Plugin<Project> {
         }
     }
 
-    /**
-     * Pins the test seed at configuration time so it isn't different on every
-     * {@link Test} execution. This is useful if random
-     * decisions in one run of {@linkplain Test} influence the
-     * outcome of subsequent runs. Pinning the seed up front like this makes
-     * the reproduction line from one run be useful on another run.
-     */
-    static String setupSeed(Project project) {
-        ExtraPropertiesExtension ext = project.rootProject.extensions.getByType(ExtraPropertiesExtension)
-        if (ext.has('testSeed')) {
-            /* Skip this if we've already pinned the testSeed. It is important
-             * that this checks the rootProject so that we know we've only ever
-             * initialized one time. */
-            return ext.get('testSeed')
-        }
-
-        String testSeed = System.getProperty('tests.seed')
-        if (testSeed == null) {
-            long seed = new Random(System.currentTimeMillis()).nextLong()
-            testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT)
-        }
-
-        ext.set('testSeed', testSeed)
-        return testSeed
-    }
-
     private static class TestFailureReportingPlugin implements Plugin<Project> {
         @Override
         void apply(Project project) {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
index 6a522af12dd..393c93c54e8 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
@@ -23,6 +23,7 @@ import groovy.transform.PackageScope
 import org.elasticsearch.gradle.doc.SnippetsTask.Snippet
 import org.gradle.api.InvalidUserDataException
 import org.gradle.api.tasks.Input
+import org.gradle.api.tasks.Internal
 import org.gradle.api.tasks.OutputDirectory
 
 import java.nio.file.Files
@@ -58,6 +59,9 @@ class RestTestsFromSnippetsTask extends SnippetsTask {
     @OutputDirectory
     File testRoot = project.file('build/rest')
 
+    @Internal
+    Set<String> names = new HashSet<>()
+
     RestTestsFromSnippetsTask() {
         project.afterEvaluate {
             // Wait to set this so testRoot can be customized
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
index efd7fe9a2a4..5e324995336 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -163,7 +163,7 @@ class PluginBuildPlugin implements Plugin<Project> {
     private static void configureDependencies(Project project) {
         project.dependencies {
-            if (ClasspathUtils.isElasticsearchProject()) {
+            if (ClasspathUtils.isElasticsearchProject(project)) {
                 compileOnly project.project(':server')
                 testCompile project.project(':test:framework')
             } else {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
index 524a78d7a77..a08f553d0e3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
@@ -47,7 +47,7 @@ class PrecommitTasks {
         }
 
         Configuration jarHellConfig = project.configurations.create("jarHell")
-        if (ClasspathUtils.isElasticsearchProject() && project.path.equals(":libs:elasticsearch-core") == false) {
+        if (ClasspathUtils.isElasticsearchProject(project) && project.path.equals(":libs:elasticsearch-core") == false) {
             // External plugins will depend on this already via transitive dependencies.
             // Internal projects are not all plugins, so make sure the check is available
             // we are not doing this for this project itself to avoid jar hell with itself
@@ -252,7 +252,7 @@ class PrecommitTasks {
     }
 
     private static TaskProvider configureLoggerUsage(Project project) {
-        Object dependency = ClasspathUtils.isElasticsearchProject() ? project.project(':test:logger-usage') :
+        Object dependency = ClasspathUtils.isElasticsearchProject(project) ? project.project(':test:logger-usage') :
                 "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}"
 
         project.configurations.create('loggerUsagePlugin')
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index ee07be24d8a..879a22292d3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -21,7 +21,6 @@ package org.elasticsearch.gradle.test
 
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.testclusters.ElasticsearchCluster
 import org.elasticsearch.gradle.testclusters.RestTestRunnerTask
-import org.elasticsearch.gradle.testclusters.TestClustersPlugin
 import org.elasticsearch.gradle.tool.Boilerplate
 import org.elasticsearch.gradle.tool.ClasspathUtils
 import org.gradle.api.DefaultTask
@@ -121,7 +120,7 @@ class RestIntegTestTask extends DefaultTask {
         Boilerplate.maybeCreate(project.configurations, 'restSpec') {
             project.dependencies.add(
                 'restSpec',
-                ClasspathUtils.isElasticsearchProject() ? project.project(':rest-api-spec') :
+                ClasspathUtils.isElasticsearchProject(project) ? project.project(':rest-api-spec') :
                     "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}"
             )
         }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
index 3f4c6a9900e..2d37e30391c 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.gradle;
 
+import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 
@@ -35,10 +36,12 @@ public class ReaperPlugin implements Plugin<Project> {
             throw new IllegalArgumentException("ReaperPlugin can only be applied to the root project of a build");
         }
 
+        project.getPlugins().apply(GlobalBuildInfoPlugin.class);
+
         Path inputDir = project.getRootDir().toPath().resolve(".gradle")
             .resolve("reaper").resolve("build-" + ProcessHandle.current().pid());
         ReaperService service = project.getExtensions().create("reaper", ReaperService.class,
-            project.getLogger(), project.getBuildDir().toPath(), inputDir);
+            project, project.getBuildDir().toPath(), inputDir);
 
         project.getGradle().buildFinished(result -> service.shutdown());
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java
index c52cee026ef..329534a4dd2 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ReaperService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.gradle;
 
 import org.elasticsearch.gradle.tool.ClasspathUtils;
 import org.gradle.api.GradleException;
+import org.gradle.api.Project;
 import org.gradle.api.logging.Logger;
 import org.gradle.internal.jvm.Jvm;
 
@@ -40,14 +41,16 @@ public class ReaperService {
 
     private static final String REAPER_CLASS = "org/elasticsearch/gradle/reaper/Reaper.class";
     private static final Pattern REAPER_JAR_PATH_PATTERN = Pattern.compile("file:(.*)!/" + REAPER_CLASS);
-    private Logger logger;
-    private Path buildDir;
-    private Path inputDir;
-    private Path logFile;
+    private final Logger logger;
+    private final boolean isInternal;
+    private final Path buildDir;
+    private final Path inputDir;
+    private final Path logFile;
     private volatile Process reaperProcess;
 
-    public ReaperService(Logger logger, Path buildDir, Path inputDir) {
-        this.logger = logger;
+    public ReaperService(Project project, Path buildDir, Path inputDir) {
+        this.logger = project.getLogger();
+        this.isInternal = ClasspathUtils.isElasticsearchProject(project);
         this.buildDir = buildDir;
         this.inputDir = inputDir;
         this.logFile = inputDir.resolve("reaper.log");
@@ -137,7 +140,7 @@ public class ReaperService {
     }
 
     private Path locateReaperJar() {
-        if (ClasspathUtils.isElasticsearchProject()) {
+        if (isInternal) {
            // when running inside the Elasticsearch build just find the jar in the runtime classpath
            URL main = this.getClass().getClassLoader().getResource(REAPER_CLASS);
            String mainPath = main.getFile();
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java
index b59d9806a0b..46f2db818a4 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/info/GlobalBuildInfoPlugin.java
@@ -25,7 +25,9 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Random;
 import java.util.stream.Collectors;
 
 public class GlobalBuildInfoPlugin implements Plugin<Project> {
@@ -46,6 +48,15 @@
         File compilerJavaHome = findCompilerJavaHome();
         File runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome);
 
+        String testSeedProperty = System.getProperty("tests.seed");
+        final String testSeed;
+        if (testSeedProperty == null) {
+            long seed = new Random(System.currentTimeMillis()).nextLong();
+            testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT);
+        } else {
+            testSeed = testSeedProperty;
+        }
+
         final List<JavaHome> javaVersions = new ArrayList<>();
         for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.getMajorVersion()); version++) {
             if (System.getenv(getJavaHomeEnvVarName(Integer.toString(version))) != null) {
@@ -95,7 +106,9 @@
             ext.set("gradleJavaVersion", Jvm.current().getJavaVersion());
             ext.set("gitRevision", gitRevision(project.getRootProject().getRootDir()));
             ext.set("buildDate", ZonedDateTime.now(ZoneOffset.UTC));
+            ext.set("testSeed", testSeed);
             ext.set("isCi", System.getenv("JENKINS_URL") != null);
+            ext.set("isInternal", GlobalBuildInfoPlugin.class.getResource("/buildSrc.marker") != null);
         });
     }
@@ -265,5 +278,4 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
             .findFirst()
             .orElseThrow(() -> new IOException("file [" + path + "] is empty"));
     }
-
 }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
index d523d3a88fa..66209caf02a 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
@@ -104,6 +104,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
         }
     }
 
+    @Internal
     ElasticsearchNode getFirstNode() {
         return nodes.getAt(clusterName + "-0");
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index 729477837b3..323f34c87d6 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -183,6 +183,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         return distributions.get(currentDistro).getVersion();
     }
 
+    @Internal
     public Path getDistroDir() {
         return workingDir.resolve("distro").resolve(getVersion() + "-" + testDistribution);
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
index d7a7bbcc41b..39c0c44275e 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
@@ -23,7 +23,7 @@ public class RunTask extends DefaultTestClustersTask {
         description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
     )
     public void setDebug(boolean enabled) {
-        this.debug = debug;
+        this.debug = enabled;
     }
 
     @Input
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java
index 40ec6bd7183..ed064b4ff04 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/ClasspathUtils.java
@@ -1,12 +1,9 @@
 package org.elasticsearch.gradle.tool;
 
-public class ClasspathUtils {
-    private static boolean isElasticsearchProject;
+import org.gradle.api.Project;
+import org.gradle.api.plugins.ExtraPropertiesExtension;
 
-    static {
-        // look for buildSrc marker file, if it exists then we are running in the context of the elastic/elasticsearch build
-        isElasticsearchProject = ClasspathUtils.class.getResource("/buildSrc.marker") != null;
-    }
+public class ClasspathUtils {
 
     private ClasspathUtils() {
     }
@@ -17,7 +14,8 @@ public class ClasspathUtils {
      *
      * @return if we are currently running in the `elastic/elasticsearch` project
      */
-    public static boolean isElasticsearchProject() {
-        return isElasticsearchProject;
+    public static boolean isElasticsearchProject(Project project) {
+        ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class);
+        return (boolean) ext.get("isInternal");
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/privileges/Role.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/privileges/Role.java
index a8522c7f60e..3ef0d6dd332 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/privileges/Role.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/user/privileges/Role.java
@@ -24,8 +24,8 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
+import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -299,12 +299,14 @@ public final class Role {
         public static final String NONE = "none";
         public static final String ALL = "all";
         public static final String MONITOR = "monitor";
-        public static final String MONITOR_DATA_FRAME_TRANSFORMS = "monitor_data_frame_transforms";
+        public static final String MONITOR_TRANSFORM_DEPRECATED = "monitor_data_frame_transforms";
+        public static final String MONITOR_TRANSFORM = "monitor_transform";
         public static final String MONITOR_ML = "monitor_ml";
         public static final String MONITOR_WATCHER = "monitor_watcher";
         public static final String MONITOR_ROLLUP = "monitor_rollup";
         public static final String MANAGE = "manage";
-        public static final String MANAGE_DATA_FRAME_TRANSFORMS = "manage_data_frame_transforms";
+        public static final String MANAGE_TRANSFORM_DEPRECATED = "manage_data_frame_transforms";
+        public static final String MANAGE_TRANSFORM = "manage_transform";
         public static final String MANAGE_ML = "manage_ml";
         public static final String MANAGE_WATCHER = "manage_watcher";
         public static final String MANAGE_ROLLUP = "manage_rollup";
@@ -321,8 +323,8 @@ public final class Role {
         public static final String MANAGE_ILM = "manage_ilm";
         public static final String READ_ILM = "read_ilm";
         public static final String MANAGE_ENRICH = "manage_enrich";
-        public static final String[] ALL_ARRAY = new String[] { NONE, ALL, MONITOR, MONITOR_DATA_FRAME_TRANSFORMS, MONITOR_ML,
-            MONITOR_WATCHER, MONITOR_ROLLUP, MANAGE, MANAGE_DATA_FRAME_TRANSFORMS,
+        public static final String[] ALL_ARRAY = new String[] { NONE, ALL, MONITOR, MONITOR_TRANSFORM_DEPRECATED, MONITOR_TRANSFORM,
+            MONITOR_ML, MONITOR_WATCHER, MONITOR_ROLLUP, MANAGE, MANAGE_TRANSFORM_DEPRECATED, MANAGE_TRANSFORM,
             MANAGE_ML, MANAGE_WATCHER, MANAGE_ROLLUP, MANAGE_INDEX_TEMPLATES, MANAGE_INGEST_PIPELINES, TRANSPORT_CLIENT,
             MANAGE_SECURITY, MANAGE_SAML, MANAGE_OIDC, MANAGE_TOKEN, MANAGE_PIPELINE, MANAGE_CCR, READ_CCR, MANAGE_ILM, READ_ILM,
             MANAGE_ENRICH };
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
index 256e38da858..1c33a7e183e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
+import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.index.IndexRequest;
@@ -40,7 +41,6 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse;
 import org.elasticsearch.index.reindex.DeleteByQueryAction;
 import org.elasticsearch.index.reindex.DeleteByQueryRequest;
 import org.elasticsearch.index.reindex.ReindexRequest;
-import org.elasticsearch.index.reindex.ScrollableHitSource;
 import org.elasticsearch.index.reindex.UpdateByQueryAction;
 import org.elasticsearch.index.reindex.UpdateByQueryRequest;
 import org.elasticsearch.rest.RestStatus;
@@ -179,10 +179,10 @@ public class ReindexIT extends ESRestHighLevelClientTestCase {
             final BulkByScrollResponse response = highLevelClient().reindex(reindexRequest, RequestOptions.DEFAULT);
 
             assertThat(response.getVersionConflicts(), equalTo(2L));
-            assertThat(response.getBulkFailures(), empty());
-            assertThat(response.getSearchFailures(), hasSize(2));
+            assertThat(response.getSearchFailures(), empty());
+            assertThat(response.getBulkFailures(), hasSize(2));
             assertThat(
-                response.getSearchFailures().stream().map(ScrollableHitSource.SearchFailure::toString).collect(Collectors.toSet()),
+                response.getBulkFailures().stream().map(BulkItemResponse.Failure::getMessage).collect(Collectors.toSet()),
                 everyItem(containsString("version conflict"))
             );
 
@@ -328,10 +328,10 @@ public class ReindexIT extends ESRestHighLevelClientTestCase {
             final BulkByScrollResponse response = highLevelClient().updateByQuery(updateByQueryRequest, RequestOptions.DEFAULT);
 
             assertThat(response.getVersionConflicts(), equalTo(1L));
-            assertThat(response.getBulkFailures(), empty());
-            assertThat(response.getSearchFailures(), hasSize(1));
+            assertThat(response.getSearchFailures(), empty());
+            assertThat(response.getBulkFailures(), hasSize(1));
             assertThat(
-                response.getSearchFailures().stream().map(ScrollableHitSource.SearchFailure::toString).collect(Collectors.toSet()),
+                response.getBulkFailures().stream().map(BulkItemResponse.Failure::getMessage).collect(Collectors.toSet()),
                 everyItem(containsString("version conflict"))
             );
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
index 77fe59b359e..2093e14d7c8 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
@@ -28,6 +28,7 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.elasticsearch.client.security.AuthenticateResponse;
+import org.elasticsearch.client.security.AuthenticateResponse.RealmInfo;
 import org.elasticsearch.client.security.ChangePasswordRequest;
 import org.elasticsearch.client.security.ClearRealmCacheRequest;
 import org.elasticsearch.client.security.ClearRealmCacheResponse;
@@ -79,7 +80,6 @@ import org.elasticsearch.client.security.PutUserRequest;
 import org.elasticsearch.client.security.PutUserResponse;
 import org.elasticsearch.client.security.RefreshPolicy;
 import org.elasticsearch.client.security.TemplateRoleName;
-import org.elasticsearch.client.security.AuthenticateResponse.RealmInfo;
 import org.elasticsearch.client.security.support.ApiKey;
 import org.elasticsearch.client.security.support.CertificateInfo;
 import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
@@ -99,8 +99,6 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.set.Sets;
 import org.hamcrest.Matchers;
 
-import javax.crypto.SecretKeyFactory;
-import javax.crypto.spec.PBEKeySpec;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
@@ -120,6 +118,9 @@ import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
+import javax.crypto.SecretKeyFactory;
+import javax.crypto.spec.PBEKeySpec;
+
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
@@ -679,8 +680,8 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
             List<Role> roles = response.getRoles();
             assertNotNull(response);
-            // 27 system roles plus the three we created
-            assertThat(roles.size(), equalTo(31));
+            // 29 system roles plus the three we created
+            assertThat(roles.size(), equalTo(33));
         }
 
         {
diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc
index 285f6bee2f1..6389307ddfb 100644
--- a/docs/reference/indices/split-index.asciidoc
+++ b/docs/reference/indices/split-index.asciidoc
@@ -21,9 +21,9 @@ POST /twitter/_split/split-twitter-index
 [[split-index-api-request]]
 ==== {api-request-title}
 
-`POST /<index>/_shrink/<target-index>`
+`POST /<index>/_split/<target-index>`
 
-`PUT /<index>/_shrink/<target-index>`
+`PUT /<index>/_split/<target-index>`
 
 [[split-index-api-prereqs]]
diff --git a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc
index 2dbf178d4a3..e41f0bae8e9 100644
--- a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc
+++ b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc
@@ -46,6 +46,26 @@
 If the destination index already exists, then it will be used as is. This makes
 it possible to set up the destination index in advance with custom settings
 and mappings.
 
+[[ml-put-dfanalytics-supported-fields]]
+===== Supported fields
+
+====== {oldetection-cap}
+
+{oldetection-cap} requires numeric or boolean data to analyze. The algorithms
+don't support missing values; therefore, fields that have data types other than
+numeric or boolean are ignored. Documents where included fields contain missing
+values, null values, or an array are also ignored. Therefore, the `dest` index
+may contain documents that don't have an {olscore}.
+
+
+====== {regression-cap}
+
+{regression-cap} supports fields that are numeric, boolean, text, keyword, and
+ip. It is also tolerant of missing values. Fields that are supported are
+included in the analysis; other fields are ignored. Documents where included
+fields contain an array with two or more values are also ignored. Documents in
+the `dest` index that don't contain a results field are not included in the
+{reganalysis}.
+
 [[ml-put-dfanalytics-path-params]]
 ==== {api-path-parms-title}
 
@@ -61,19 +81,21 @@
 ==== {api-request-body-title}
 
 `analysis`::
-  (Required, object) Defines the type of {dfanalytics} you want to perform on your source
-  index. For example: `outlier_detection`. See <<dfanalytics-types>>.
+  (Required, object) Defines the type of {dfanalytics} you want to perform on
+  your source index. For example: `outlier_detection`. See
+  <<dfanalytics-types>>.
 
 `analyzed_fields`::
   (Optional, object) You can specify both `includes` and/or `excludes` patterns.
   If `analyzed_fields` is not set, only the relevant fields will be included.
-  For example, all the numeric fields for {oldetection}.
+  For example, all the numeric fields for {oldetection}. For the supported field
+  types, see <<ml-put-dfanalytics-supported-fields>>.
 
-  `analyzed_fields.includes`:::
+  `includes`:::
     (Optional, array) An array of strings that defines the fields that will be
     included in the analysis.
 
-  `analyzed_fields.excludes`:::
+  `excludes`:::
     (Optional, array) An array of strings that defines the fields that will be
     excluded from the analysis.
diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc
index cc7cb0ea912..ba6cdc71274 100644
--- a/docs/reference/modules/discovery/bootstrapping.asciidoc
+++ b/docs/reference/modules/discovery/bootstrapping.asciidoc
@@ -4,11 +4,11 @@
 Starting an Elasticsearch cluster for the very first time requires the initial
 set of <<master-node,master-eligible nodes>> to be explicitly defined on one or
 more of the master-eligible nodes in the cluster. This is known as _cluster
-bootstrapping_. This is only required the very first time the cluster starts
-up: nodes that have already joined a cluster store this information in their
-data folder for use in a <<restart-upgrade,full cluster restart>>, and
-freshly-started nodes that are joining a running cluster obtain this
-information from the cluster's elected master.
+bootstrapping_. This is only required the first time a cluster starts up: nodes
+that have already joined a cluster store this information in their data folder
+for use in a <<restart-upgrade,full cluster restart>>, and freshly-started nodes
+that are joining a running cluster obtain this information from the cluster's
+elected master.
 
 The initial set of master-eligible nodes is defined in the
 <<initial_master_nodes,`cluster.initial_master_nodes` setting>>. This should be
@@ -30,12 +30,8 @@
 node:
 
 When you start a master-eligible node, you can provide this setting on the
 command line or in the `elasticsearch.yml` file. After the cluster has formed,
-this setting is no longer required and is ignored. It need not be set on
-master-ineligible nodes, nor on master-eligible nodes that are started to join
-an existing cluster. Note that master-eligible nodes should use storage that
-persists across restarts. If they do not, and `cluster.initial_master_nodes` is
-set, and a full cluster restart occurs, then another brand-new cluster will
-form and this may result in data loss.
+this setting is no longer required. It should not be set for master-ineligible
+nodes, master-eligible nodes joining an existing cluster, or cluster restarts.
 
 It is technically sufficient to set `cluster.initial_master_nodes` on a single
 master-eligible node in the cluster, and only to mention that single node in the
diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc
index c7a5e72a9f0..632090b1979 100644
--- a/docs/reference/rest-api/common-parms.asciidoc
+++ b/docs/reference/rest-api/common-parms.asciidoc
@@ -587,8 +587,9 @@ end::scroll_size[]
 
 tag::search_timeout[]
 `search_timeout`::
-(Optional, <<time-units,time units>> Explicit timeout for each search
-request. Defaults to no timeout.
+(Optional, <<time-units,time units>>)
+Explicit timeout for each search request.
+Defaults to no timeout.
 end::search_timeout[]
 
 tag::search_type[]
diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc
index b5e1a4571bd..db4c80805e4 100644
--- a/docs/reference/search/request-body.asciidoc
+++ b/docs/reference/search/request-body.asciidoc
@@ -73,7 +73,7 @@
 
 include::{docdir}/rest-api/common-parms.asciidoc[tag=search_type]
 
 include::{docdir}/rest-api/common-parms.asciidoc[tag=terminate_after]
 
-include::{docdir}/rest-api/common-parms.asciidoc[tag=timeout]
+include::{docdir}/rest-api/common-parms.asciidoc[tag=search_timeout]
 
 Out of the above, the `search_type`, `request_cache` and the
diff --git a/docs/reference/setup/important-settings/discovery-settings.asciidoc b/docs/reference/setup/important-settings/discovery-settings.asciidoc
index 942c076a33a..d0f82619f89 100644
--- a/docs/reference/setup/important-settings/discovery-settings.asciidoc
+++ b/docs/reference/setup/important-settings/discovery-settings.asciidoc
@@ -40,8 +40,9 @@
 settings configured, this step is automatically performed by the nodes
 themselves. As this auto-bootstrapping is <<modules-discovery-quorums,inherently unsafe>>, when you start a brand
 new cluster in <<dev-vs-prod-mode,production mode>>, you must explicitly list the master-eligible nodes whose votes should be
-counted in the very first election. This list is set using the
-`cluster.initial_master_nodes` setting.
+counted in the very first election. This list is set using the
+`cluster.initial_master_nodes` setting. You should not use this setting when
+restarting a cluster or adding a new node to an existing cluster.
 
 [source,yaml]
 --------------------------------------------------
diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc
index 6f2f5a1b6e4..2a2f61790fb 100644
--- a/docs/reference/sql/functions/grouping.asciidoc
+++ b/docs/reference/sql/functions/grouping.asciidoc
@@ -86,6 +86,16 @@
 the multiple of a day. E.g.: for `HISTOGRAM(CAST(birth_date AS DATE), INTERVAL '2 3:04' DAY TO MINUTE)` the interval
 actually used will be `INTERVAL '2' DAY`. If the interval specified is less than 1 day, e.g.:
 `HISTOGRAM(CAST(birth_date AS DATE), INTERVAL '20' HOUR)` then the interval used will be `INTERVAL '1' DAY`.
 
+[IMPORTANT]
+All intervals specified for a date/time HISTOGRAM will use a <<search-aggregations-bucket-datehistogram-aggregation,fixed interval>>
+in their `date_histogram` aggregation definition, with the notable exception of `INTERVAL '1' YEAR`, where a calendar interval is used.
+A calendar interval was chosen to produce a more intuitive result for YEAR groupings. Calendar intervals consider a one-year
+bucket as the one starting on January 1st of that specific year, whereas a fixed-interval one-year bucket considers one year as a number
+of milliseconds (for example, `31536000000ms`, corresponding to 365 days of 24 hours, 60 minutes per hour, and so on). With fixed intervals,
+the day of February 5th, 2019, for example, belongs to a bucket that starts on December 20th, 2018, so {es} (and implicitly {es-sql}) would
+have returned the year 2018 for a date that is actually in 2019. With a calendar interval this behavior is more intuitive: the day of
+February 5th, 2019 actually belongs to the 2019 bucket.
+
 [IMPORTANT]
 Histogram in SQL cannot be applied on **TIME** type.
 E.g.: `HISTOGRAM(CAST(birth_date AS TIME), INTERVAL '10' MINUTES)` is currently not supported.
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java
index 2adb7618aa4..31cefbcf0c2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java
@@ -28,8 +28,6 @@ import java.util.Map;
  */
 public class Globals {
     private final Map<String, Constant> constantInitializers = new HashMap<>();
-    private final Map<String, Class<?>> classBindings = new HashMap<>();
-    private final Map<Object, String> instanceBindings = new HashMap<>();
     private final BitSet statements;
 
     /** Create a new Globals from the set of statement boundaries */
@@ -44,34 +42,11 @@ public class Globals {
         }
     }
 
-    /** Adds a new class binding to be written as a local variable */
-    public String addClassBinding(Class<?> type) {
-        String name = "$class_binding$" + classBindings.size();
-        classBindings.put(name, type);
-
-        return name;
-    }
-
-    /** Adds a new binding to be written as a local variable */
-    public String addInstanceBinding(Object instance) {
-        return instanceBindings.computeIfAbsent(instance, key -> "$instance_binding$" + instanceBindings.size());
-    }
-
     /** Returns the current initializers */
     public Map<String, Constant> getConstantInitializers() {
         return constantInitializers;
     }
 
-    /** Returns the current bindings */
-    public Map<String, Class<?>> getClassBindings() {
-        return classBindings;
-    }
-
-    /** Returns the current bindings */
-    public Map<Object, String> getInstanceBindings() {
-        return instanceBindings;
-    }
-
     /** Returns the set of statement boundaries */
     public BitSet getStatements() {
         return statements;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java
index 24c5245f61e..40d3a37df6a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java
@@ -40,6 +40,9 @@ import java.util.Objects;
 import java.util.Set;
 
 import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE;
+import static org.objectweb.asm.Opcodes.ACC_PRIVATE;
+import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
+import static org.objectweb.asm.Opcodes.ACC_STATIC;
 
 /**
  * Represents a user-defined call.
@@ -54,6 +57,7 @@ public final class ECallLocal extends AExpression {
     private PainlessClassBinding classBinding = null;
     private int classBindingOffset = 0;
     private PainlessInstanceBinding instanceBinding = null;
+    private String bindingName = null;
 
     public ECallLocal(Location location, String name, List<AExpression> arguments) {
         super(location);
@@ -138,9 +142,15 @@ public final class ECallLocal extends AExpression {
         } else if (classBinding != null) {
             typeParameters = new ArrayList<>(classBinding.typeParameters);
             actual = classBinding.returnType;
+            bindingName = scriptRoot.getNextSyntheticName("class_binding");
+            scriptRoot.getClassNode().addField(new SField(location,
+                ACC_PRIVATE, bindingName, classBinding.javaConstructor.getDeclaringClass(), null));
         } else if (instanceBinding != null) {
             typeParameters = new ArrayList<>(instanceBinding.typeParameters);
             actual = instanceBinding.returnType;
+            bindingName = scriptRoot.getNextSyntheticName("instance_binding");
+            scriptRoot.getClassNode().addField(new SField(location,
+                ACC_STATIC | ACC_PUBLIC, bindingName, instanceBinding.targetInstance.getClass(), instanceBinding.targetInstance));
         } else {
             throw new IllegalStateException("Illegal tree structure.");
         }
@@ -178,14 +188,13 @@ public final class ECallLocal extends AExpression {
             methodWriter.invokeStatic(Type.getType(importedMethod.targetClass),
                 new Method(importedMethod.javaMethod.getName(), importedMethod.methodType.toMethodDescriptorString()));
         } else if (classBinding != null) {
-            String name = globals.addClassBinding(classBinding.javaConstructor.getDeclaringClass());
             Type type = Type.getType(classBinding.javaConstructor.getDeclaringClass());
             int javaConstructorParameterCount = classBinding.javaConstructor.getParameterCount() - classBindingOffset;
 
             Label nonNull = new Label();
 
             methodWriter.loadThis();
-            methodWriter.getField(CLASS_TYPE, name, type);
+            methodWriter.getField(CLASS_TYPE, bindingName, type);
             methodWriter.ifNonNull(nonNull);
             methodWriter.loadThis();
             methodWriter.newInstance(type);
@@ -200,11 +209,11 @@ public final class ECallLocal extends AExpression {
             }
 
             methodWriter.invokeConstructor(type, Method.getMethod(classBinding.javaConstructor));
-            methodWriter.putField(CLASS_TYPE, name, type);
+            methodWriter.putField(CLASS_TYPE, bindingName, type);
 
             methodWriter.mark(nonNull);
             methodWriter.loadThis();
-            methodWriter.getField(CLASS_TYPE, name, type);
+            methodWriter.getField(CLASS_TYPE, bindingName, type);
 
             for (int argument = 0; argument < classBinding.javaMethod.getParameterCount(); ++argument) {
                 arguments.get(argument + javaConstructorParameterCount).write(classWriter, methodWriter, globals);
@@ -212,11 +221,10 @@ public final class ECallLocal extends AExpression {
 
             methodWriter.invokeVirtual(type, Method.getMethod(classBinding.javaMethod));
         } else if (instanceBinding != null) {
-            String name = globals.addInstanceBinding(instanceBinding.targetInstance);
             Type type = Type.getType(instanceBinding.targetInstance.getClass());
 
             methodWriter.loadThis();
-            methodWriter.getStatic(CLASS_TYPE, name, type);
+            methodWriter.getStatic(CLASS_TYPE, bindingName, type);
 
             for (int argument = 0; argument < instanceBinding.javaMethod.getParameterCount(); ++argument) {
                 arguments.get(argument).write(classWriter, methodWriter, globals);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java
index aee002c8170..e9776d0abba 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java
@@ -83,6 +83,7 @@ public final class SClass extends AStatement {
     private final String name;
     private final Printer debugStream;
     private final List<SFunction> functions = new ArrayList<>();
+    private final List<SField> fields = new ArrayList<>();
     private final Globals globals;
     private final List<AStatement> statements;
 
@@ -112,6 +113,10 @@ public final class SClass extends AStatement {
         functions.add(function);
     }
 
+    void addField(SField field) {
+        fields.add(field);
+    }
+
     @Override
     public void storeSettings(CompilerSettings settings) {
         for (SFunction function : functions) {
@@ -289,6 +294,11 @@ public final class SClass extends AStatement {
             function.write(classWriter, globals);
         }
 
+        // Write all fields:
+        for (SField field : fields) {
+            field.write(classWriter);
+        }
+
         // Write the constants
         if (false == globals.getConstantInitializers().isEmpty()) {
             Collection<Constant> inits = globals.getConstantInitializers().values();
@@ -315,20 +325,6 @@ public final class SClass extends AStatement {
             clinit.endMethod();
         }
 
-        // Write class binding variables
-        for (Map.Entry<String, Class<?>> classBinding : globals.getClassBindings().entrySet()) {
-            String name = classBinding.getKey();
-            String descriptor = Type.getType(classBinding.getValue()).getDescriptor();
-            classVisitor.visitField(Opcodes.ACC_PRIVATE, name, descriptor, null, null).visitEnd();
-        }
-
-        // Write instance binding variables
-        for (Map.Entry<Object, String> instanceBinding : globals.getInstanceBindings().entrySet()) {
-            String name = instanceBinding.getValue();
-            String descriptor = Type.getType(instanceBinding.getKey().getClass()).getDescriptor();
-            classVisitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, name, descriptor, null, null).visitEnd();
-        }
-
         // Write any needsVarName methods for used variables
         for (org.objectweb.asm.commons.Method needsMethod : scriptClassInfo.getNeedsMethods()) {
             String name = needsMethod.getName();
@@ -349,8 +345,10 @@ public final class SClass extends AStatement {
         Map<String, Object> statics = new HashMap<>();
         statics.put("$FUNCTIONS", table.getFunctionTable());
 
-        for (Map.Entry<Object, String> instanceBinding : globals.getInstanceBindings().entrySet()) {
-            statics.put(instanceBinding.getValue(), instanceBinding.getKey());
+        for (SField field : fields) {
+            if (field.getInstance() != null) {
+                statics.put(field.getName(), field.getInstance());
+            }
         }
 
         return statics;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SField.java
new file mode 100644
index 00000000000..fbacb6867fd
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SField.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.node;
+
+import org.elasticsearch.painless.ClassWriter;
+import org.elasticsearch.painless.CompilerSettings;
+import org.elasticsearch.painless.Globals;
+import org.elasticsearch.painless.Locals;
+import org.elasticsearch.painless.Location;
+import org.elasticsearch.painless.MethodWriter;
+import org.elasticsearch.painless.ScriptRoot;
+import org.objectweb.asm.Type;
+
+import java.util.Set;
+
+/**
+ * Represents a member field for its parent class (internal only).
+ */
+public class SField extends ANode {
+
+    private final int access;
+    private final String name;
+    private final Class<?> type;
+    private final Object instance;
+
+    /**
+     * Standard constructor.
+     * @param location original location in the source
+     * @param access asm constants for field modifiers
+     * @param name name of the field
+     * @param type type of the field
+     * @param instance initial value for the field
+     */
+    public SField(Location location, int access, String name, Class<?> type, Object instance) {
+        super(location);
+
+        this.access = access;
+        this.name = name;
+        this.type = type;
+        this.instance = instance;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public Object getInstance() {
+        return instance;
+    }
+
+    @Override
+    void storeSettings(CompilerSettings settings) {
+        throw createError(new UnsupportedOperationException("unexpected node"));
+    }
+
+    @Override
+    void extractVariables(Set<String> variables) {
+        throw createError(new UnsupportedOperationException("unexpected node"));
+    }
+
+    @Override
+    void analyze(ScriptRoot scriptRoot, Locals locals) {
+        throw createError(new UnsupportedOperationException("unexpected node"));
+    }
+
+    @Override
+    void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals) {
+        throw createError(new UnsupportedOperationException("unexpected node"));
+    }
+
+    void write(ClassWriter classWriter) {
+        classWriter.getClassVisitor().visitField(access, name, Type.getType(type).getDescriptor(), null, null).visitEnd();
+    }
+
+    @Override
+    public String toString() {
+        return singleLineToString(name, type);
+    }
+}
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java
index 0338e0fba91..d8003d93183 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java
@@ -32,7 +32,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
@@ -40,7 +40,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SubSearchContext;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -100,15 +99,19 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
             for (Object matchedSlot : field.getValues()) {
                 int slot = (int) matchedSlot;
                 BytesReference document = percolateQuery.getDocuments().get(slot);
-                SubSearchContext subSearchContext =
-                    createSubSearchContext(context, percolatorLeafReaderContext, document, slot);
-                subSearchContext.parsedQuery(new ParsedQuery(query));
+                SearchContextHighlight highlight = new SearchContextHighlight(context.highlight().fields());
+                // Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
+                highlight.globalForceSource(true);
+                QueryShardContext shardContext = new QueryShardContext(context.getQueryShardContext());
+                shardContext.freezeContext();
+                shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
+                shardContext.lookup().source().setSource(document);
                 hitContext.reset(
                     new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap()),
                     percolatorLeafReaderContext, slot, percolatorIndexSearcher
                 );
                 hitContext.cache().clear();
-                highlightPhase.hitExecute(subSearchContext, hitContext);
+                highlightPhase.hitExecute(context.shardTarget(), shardContext, query, highlight, hitContext);
                 for (Map.Entry<String, HighlightField> entry : hitContext.hit().getHighlightFields().entrySet()) {
                     if (percolateQuery.getDocuments().size() == 1) {
                         String hlFieldName;
@@ -166,15 +169,4 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
         }
         return Collections.emptyList();
     }
-
-    private SubSearchContext createSubSearchContext(SearchContext context, LeafReaderContext leafReaderContext,
-                                                    BytesReference source, int docId) {
-        SubSearchContext subSearchContext = new SubSearchContext(context);
-        subSearchContext.highlight(new SearchContextHighlight(context.highlight().fields()));
-        // Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
-        subSearchContext.highlight().globalForceSource(true);
-        subSearchContext.lookup().source().setSegmentAndDocument(leafReaderContext, docId);
-        subSearchContext.lookup().source().setSource(source);
-        return subSearchContext;
-    }
 }
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java
index 8e5dff170d4..d64bcf8662e 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java
@@ -19,11 +19,10 @@
 
 package org.elasticsearch.index.reindex;
 
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
-import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
 import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestChannel;
 import org.elasticsearch.rest.RestResponse;
@@ -67,7 +66,7 @@ public class BulkIndexByScrollResponseContentListener extends RestBuilderListene
             }
         }
         for (SearchFailure failure: response.getSearchFailures()) {
-            RestStatus failureStatus = ExceptionsHelper.status(failure.getReason());
+            RestStatus failureStatus = failure.getStatus();
             if (failureStatus.getStatus() > status.getStatus()) {
                 status = failureStatus;
             }
diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
index 6b1a1c9254c..4287c9b57fe 100644
--- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
+++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java
@@ -26,9 +26,9 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
 import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
-import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -45,18 +45,21 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter {
 
     // Convert the marked-up values held on-disk to plain-text versions for highlighting
     @Override
-    protected List<Object> loadFieldValues(MappedFieldType fieldType, Field field, SearchContext context, HitContext hitContext)
-            throws IOException {
-        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext);
+    protected List<Object> loadFieldValues(MappedFieldType fieldType,
+                                           Field field,
+                                           QueryShardContext context,
+                                           HitContext hitContext,
+                                           boolean forceSource) throws IOException {
+        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext, forceSource);
         String[] fieldValuesAsString = fieldValues.toArray(new String[fieldValues.size()]);
-        
+
         AnnotatedText[] annotations = new AnnotatedText[fieldValuesAsString.length];
         for (int i = 0; i < fieldValuesAsString.length; i++) {
             annotations[i] = AnnotatedText.parse(fieldValuesAsString[i]);
         }
         // Store the annotations in the hitContext
         hitContext.cache().put(AnnotatedText.class.getName(), annotations);
-        
+
         ArrayList<String> result = new ArrayList<>(annotations.length);
         for (int i = 0; i < annotations.length; i++) {
             result.add(annotations[i].textMinusMarkup);
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java
index 37963648a74..aaf7dc6391b 100644
--- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java
+++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java
@@ -147,13 +147,7 @@ public class AzureBlobContainer extends AbstractBlobContainer {
             // Executing deletes in parallel since Azure SDK 8 is using blocking IO while Azure does not provide a bulk delete API endpoint
             // TODO: Upgrade to newer non-blocking Azure SDK 11 and execute delete requests in parallel that way.
             for (String blobName : blobNames) {
-                executor.execute(new ActionRunnable<Void>(listener) {
-                    @Override
-                    protected void doRun() throws IOException {
-                        deleteBlobIgnoringIfNotExists(blobName);
-                        listener.onResponse(null);
-                    }
-                });
+                executor.execute(ActionRunnable.run(listener, () -> deleteBlobIgnoringIfNotExists(blobName)));
             }
         }
         try {
diff --git a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java
index 86be481f27f..f9ab268fab8 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.action;
 
 import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.CheckedRunnable;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 
 /**
@@ -30,6 +32,32 @@ public abstract class ActionRunnable<Response> extends AbstractRunnable {
 
     protected final ActionListener<Response> listener;
 
+    /**
+     * Creates a {@link Runnable} that invokes the given listener with {@code null} after the given runnable has executed.
+     * @param listener Listener to invoke
+     * @param runnable Runnable to execute
+     * @return Wrapped {@code Runnable}
+     */
+    public static <T> ActionRunnable<T> run(ActionListener<T> listener, CheckedRunnable<Exception> runnable) {
+        return new ActionRunnable<T>(listener) {
+            @Override
+            protected void doRun() throws Exception {
+                runnable.run();
+                listener.onResponse(null);
+            }
+        };
+    }
+
+    /**
+     * Creates a {@link Runnable} that invokes the given listener with the return of the given supplier.
+     * @param listener Listener to invoke
+     * @param supplier Supplier that provides value to pass to listener
+     * @return Wrapped {@code Runnable}
+     */
+    public static <T> ActionRunnable<T> supply(ActionListener<T> listener, CheckedSupplier<T, Exception> supplier) {
+        return ActionRunnable.wrap(listener, l -> l.onResponse(supplier.get()));
+    }
+
     /**
      * Creates a {@link Runnable} that wraps the given listener and a consumer of it that is executed when the {@link Runnable} is run.
     * Invokes {@link ActionListener#onFailure(Exception)} on it if an exception is thrown on executing the consumer.
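[Editor's note, not part of the patch: a minimal usage sketch of the two `ActionRunnable` helpers added above, assembled from call sites elsewhere in this same patch; `executor`, `threadPool`, `listener`, and the called methods are assumed to already be in scope.]

    // run(): execute side-effecting work, then complete the listener with null
    // (as in the AzureBlobContainer change above).
    executor.execute(ActionRunnable.run(listener, () -> deleteBlobIgnoringIfNotExists(blobName)));

    // supply(): execute work whose return value completes the listener
    // (as in the TransportSingleShardAction change below).
    threadPool.executor(ThreadPool.Names.GENERIC)
        .execute(ActionRunnable.supply(listener, () -> shardOperation(request, shardId)));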
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
index 1d0c3ed4d8c..7644c9d82c4 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
@@ -121,8 +121,8 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction<SnapshotsStatusRequest, SnapshotsStatusResponse> {
                 ActionListener.wrap(nodeSnapshotStatuses -> threadPool.executor(ThreadPool.Names.GENERIC).execute(
-                    ActionRunnable.wrap(listener, l -> l.onResponse(buildResponse(request, snapshotsService.currentSnapshots(
-                        request.repository(), Arrays.asList(request.snapshots())), nodeSnapshotStatuses)))), listener::onFailure));
+                    ActionRunnable.supply(listener, () -> buildResponse(request, snapshotsService.currentSnapshots(
+                        request.repository(), Arrays.asList(request.snapshots())), nodeSnapshotStatuses))), listener::onFailure));
         } else {
             // We don't have any in-progress shards, just return current stats
             listener.onResponse(buildResponse(request, currentSnapshots, null));
diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index b3fe85abaa5..18051aa99db 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -352,7 +352,7 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
         }
     }
 
-    private ShardSearchFailure[] buildShardFailures() {
+    ShardSearchFailure[] buildShardFailures() {
         AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures.get();
         if (shardFailures == null) {
             return ShardSearchFailure.EMPTY_ARRAY;
@@ -510,20 +510,23 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
         return request;
     }
 
-    protected final SearchResponse buildSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId) {
-        ShardSearchFailure[] failures = buildShardFailures();
-        Boolean allowPartialResults = request.allowPartialSearchResults();
-        assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults";
-        if (allowPartialResults == false && failures.length > 0){
-            raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures));
-        }
+    protected final SearchResponse buildSearchResponse(InternalSearchResponse internalSearchResponse,
+                                                       String scrollId,
+                                                       ShardSearchFailure[] failures) {
         return new SearchResponse(internalSearchResponse, scrollId, getNumShards(), successfulOps.get(),
             skippedOps.get(), buildTookInMillis(), failures, clusters);
     }
 
     @Override
     public void sendSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId) {
-        listener.onResponse(buildSearchResponse(internalSearchResponse, scrollId));
+        ShardSearchFailure[] failures = buildShardFailures();
+        Boolean allowPartialResults = request.allowPartialSearchResults();
+        assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults";
+        if (allowPartialResults == false && failures.length > 0){
+            raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures));
+        } else {
+            listener.onResponse(buildSearchResponse(internalSearchResponse, scrollId, failures));
+        }
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index c9ca9aa4c67..6725c92c808 100644
--- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -300,6 +300,6 @@ public abstract class TransportBroadcastAction<
     private void asyncShardOperation(ShardRequest request, Task task, ActionListener<ShardResponse> listener) {
         transportService.getThreadPool().executor(shardExecutor)
-            .execute(ActionRunnable.wrap(listener, l -> l.onResponse(shardOperation(request, task))));
+            .execute(ActionRunnable.supply(listener, () -> shardOperation(request, task)));
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
index 8fae3ebab54..cc75c3863e2 100644
--- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
@@ -107,7 +107,7 @@ public abstract class TransportSingleShardAction<
     private void asyncShardOperation(Request request, ShardId shardId, ActionListener<Response> listener) throws IOException {
         threadPool.executor(getExecutor(request, shardId))
-            .execute(ActionRunnable.wrap(listener, l -> l.onResponse((shardOperation(request, shardId)))));
+            .execute(ActionRunnable.supply(listener, () -> shardOperation(request, shardId)));
     }
 
     protected abstract Writeable.Reader<Response> getResponseReader();
diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
index 7e7cbac051f..43ef22be0fe 100644
--- a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
+++ b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
@@ -337,7 +337,7 @@ public class GeoUtils {
         }
     }
 
-    private static double centeredModulus(double dividend, double divisor) {
+    public static double centeredModulus(double dividend, double divisor) {
         double rtn = dividend % divisor;
         if (rtn <= 0) {
             rtn += divisor;
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java
index 78c1b995683..efa490a2357 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeIndexer.java
@@ -23,6 +23,7 @@ package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.document.LatLonShape;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.geometry.Circle;
 import org.elasticsearch.geometry.Geometry;
 import org.elasticsearch.geometry.GeometryCollection;
@@ -50,6 +51,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.apache.lucene.geo.GeoUtils.orient;
 import static org.elasticsearch.common.geo.GeoUtils.normalizeLat;
 import static org.elasticsearch.common.geo.GeoUtils.normalizeLon;
+import static org.elasticsearch.common.geo.GeoUtils.normalizePoint;
 
 /**
  * Utility class that converts geometries into Lucene-compatible form
@@ -160,8 +162,9 @@ public final class GeoShapeIndexer implements AbstractGeometryFieldMapper.Indexe
 
     @Override
     public Geometry visit(Point point) {
//TODO: Just remove altitude for now. We need to add normalization later - return new Point(point.getX(), point.getY()); + double[] latlon = new double[]{point.getX(), point.getY()}; + normalizePoint(latlon); + return new Point(latlon[0], latlon[1]); } @Override @@ -222,88 +225,113 @@ public final class GeoShapeIndexer implements AbstractGeometryFieldMapper.Indexe * Splits the specified line by datelines and adds them to the supplied lines array */ private List decomposeGeometry(Line line, List lines) { - - for (Line partPlus : decompose(+DATELINE, line)) { - for (Line partMinus : decompose(-DATELINE, partPlus)) { - double[] lats = new double[partMinus.length()]; - double[] lons = new double[partMinus.length()]; - for (int i = 0; i < partMinus.length(); i++) { - lats[i] = normalizeLat(partMinus.getY(i)); - lons[i] = normalizeLonMinus180Inclusive(partMinus.getX(i)); - } - lines.add(new Line(lons, lats)); + for (Line part : decompose(line)) { + double[] lats = new double[part.length()]; + double[] lons = new double[part.length()]; + for (int i = 0; i < part.length(); i++) { + lats[i] = normalizeLat(part.getY(i)); + lons[i] = normalizeLonMinus180Inclusive(part.getX(i)); } + lines.add(new Line(lons, lats)); } return lines; } /** - * Decompose a linestring given as array of coordinates at a vertical line. - * - * @param dateline x-axis intercept of the vertical line - * @param line linestring that should be decomposed - * @return array of linestrings given as coordinate arrays + * Calculates how many degrees the given longitude needs to be moved east in order to land in the -180 to +180 range. +180 is + * inclusive only if include180 is true. */ - private List decompose(double dateline, Line line) { - double[] lons = line.getX(); - double[] lats = line.getY(); - return decompose(dateline, lons, lats); + double calculateShift(double lon, boolean include180) { + double normalized = GeoUtils.centeredModulus(lon, 360); + double shift = Math.round(normalized - lon); + if (!include180 && normalized == 180.0) { + shift = shift - 360; + } + return shift; } /** - * Decompose a linestring given as two arrays of coordinates at a vertical line. + * Decompose a linestring given as an array of coordinates at the anti-meridian. + * + * @param line linestring that should be decomposed + * @return array of linestrings given as coordinate arrays */ - private List decompose(double dateline, double[] lons, double[] lats) { + private List decompose(Line line) { + double[] lons = line.getX(); + double[] lats = line.getY(); int offset = 0; ArrayList parts = new ArrayList<>(); - double lastLon = lons[0]; - double shift = lastLon > DATELINE ? DATELINE : (lastLon < -DATELINE ? -DATELINE : 0); - - for (int i = 1; i < lons.length; i++) { - double t = intersection(lastLon, lons[i], dateline); - lastLon = lons[i]; - if (Double.isNaN(t) == false) { - double[] partLons = Arrays.copyOfRange(lons, offset, i + 1); - double[] partLats = Arrays.copyOfRange(lats, offset, i + 1); - if (t < 1) { - Point intersection = position(new Point(lons[i - 1], lats[i - 1]), new Point(lons[i], lats[i]), t); - partLons[partLons.length - 1] = intersection.getX(); - partLats[partLats.length - 1] = intersection.getY(); - - lons[offset + i - 1] = intersection.getX(); - lats[offset + i - 1] = intersection.getY(); - - shift(shift, partLons); - offset = i - 1; - shift = lons[i] > DATELINE ? DATELINE : (lons[i] < -DATELINE ?
-DATELINE : 0); - } else { - shift(shift, partLons); - offset = i; - } + double shift = 0; + int i = 1; + while (i < lons.length) { + // Check where the line is going east (+1), west (-1) or directly north/south (0) + int direction = Double.compare(lons[i], lons[i - 1]); + double newShift = calculateShift(lons[i - 1], direction < 0); + // first point lon + shift is always between -180.0 and +180.0 + if (i - offset > 1 && newShift != shift) { + // Jumping over anti-meridian - we need to start a new segment + double[] partLons = Arrays.copyOfRange(lons, offset, i); + double[] partLats = Arrays.copyOfRange(lats, offset, i); + performShift(shift, partLons); + shift = newShift; + offset = i - 1; parts.add(new Line(partLons, partLats)); + } else { + // Check if new point intersects with anti-meridian + shift = newShift; + double t = intersection(lons[i - 1] + shift, lons[i] + shift); + if (Double.isNaN(t) == false) { + // Found intersection, all previous segments are now part of the linestring + double[] partLons = Arrays.copyOfRange(lons, offset, i + 1); + double[] partLats = Arrays.copyOfRange(lats, offset, i + 1); + lons[i - 1] = partLons[partLons.length - 1] = (direction > 0 ? DATELINE : -DATELINE) - shift; + lats[i - 1] = partLats[partLats.length - 1] = lats[i - 1] + (lats[i] - lats[i - 1]) * t; + performShift(shift, partLons); + offset = i - 1; + parts.add(new Line(partLons, partLats)); + } else { + // Didn't find intersection - just continue checking + i++; + } } } if (offset == 0) { - shift(shift, lons); + performShift(shift, lons); parts.add(new Line(lons, lats)); } else if (offset < lons.length - 1) { double[] partLons = Arrays.copyOfRange(lons, offset, lons.length); double[] partLats = Arrays.copyOfRange(lats, offset, lats.length); - shift(shift, partLons); + performShift(shift, partLons); parts.add(new Line(partLons, partLats)); } return parts; } /** - * shifts all coordinates by (- shift * 2) + * Checks if the segment from p1x to p2x intersects with the anti-meridian. + * p1x must be within the -180 to +180 range + */ - private static void shift(double shift, double[] lons) { + private static double intersection(double p1x, double p2x) { + if (p1x == p2x) { + return Double.NaN; + } + final double t = ((p1x < p2x ?
DATELINE : -DATELINE) - p1x) / (p2x - p1x); + if (t >= 1 || t <= 0) { + return Double.NaN; + } else { + return t; + } + } + + /** + * shifts all coordinates by shift + */ + private static void performShift(double shift, double[] lons) { if (shift != 0) { for (int j = 0; j < lons.length; j++) { - lons[j] = lons[j] - 2 * shift; + lons[j] = lons[j] + shift; } } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java index 53e34251f4a..59ac32c6661 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java @@ -241,10 +241,10 @@ public class BulkByScrollResponse extends ActionResponse implements ToXContentFr } else if (token == Token.START_OBJECT) { switch (name) { case SearchFailure.REASON_FIELD: - bulkExc = ElasticsearchException.fromXContent(parser); + searchExc = ElasticsearchException.fromXContent(parser); break; case Failure.CAUSE_FIELD: - searchExc = ElasticsearchException.fromXContent(parser); + bulkExc = ElasticsearchException.fromXContent(parser); break; default: parser.skipChildren(); @@ -285,7 +285,11 @@ public class BulkByScrollResponse extends ActionResponse implements ToXContentFr if (bulkExc != null) { return new Failure(index, type, id, bulkExc, RestStatus.fromCode(status)); } else if (searchExc != null) { + if (status == null) { + return new SearchFailure(searchExc, index, shardId, nodeId); + } else { + return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status)); + } - return new SearchFailure(searchExc, index, shardId, nodeId); } else { throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present"); }
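Reviewer note, stepping back to the GeoShapeIndexer rewrite above: instead of decomposing every line twice (once at +DATELINE, once at -DATELINE) and un-mirroring the result with lons[j] - 2 * shift, the new code computes per vertex how many degrees the longitude must be translated east to land in the -180 to +180 range, then shifts whole segments by that amount. A self-contained sketch of just that arithmetic; centeredModulus is copied from the GeoUtils hunk above, while the wrapper class and main method are illustrative only:

public class CalculateShiftSketch {
    // Copied from GeoUtils: modulus centered on zero, result in (-divisor/2, divisor/2].
    static double centeredModulus(double dividend, double divisor) {
        double rtn = dividend % divisor;
        if (rtn <= 0) {
            rtn += divisor;
        }
        if (rtn > divisor / 2) {
            rtn -= divisor;
        }
        return rtn;
    }

    // Mirrors GeoShapeIndexer.calculateShift: the eastward shift (a multiple of 360)
    // that puts lon into the -180 to +180 range.
    static double calculateShift(double lon, boolean include180) {
        double normalized = centeredModulus(lon, 360);
        double shift = Math.round(normalized - lon);
        if (!include180 && normalized == 180.0) {
            shift = shift - 360;
        }
        return shift;
    }

    public static void main(String[] args) {
        System.out.println(calculateShift(190, true));   // -360.0: 190 is treated as -170
        System.out.println(calculateShift(-541, true));  // 720.0: -541 is treated as 179
        System.out.println(calculateShift(180, true));   // 0.0: +180 is kept when include180 is true
        System.out.println(calculateShift(180, false));  // -360.0: +180 becomes -180 otherwise
    }
}

The include180 flag matches how decompose calls it: calculateShift(lons[i - 1], direction < 0) keeps +180 as a valid endpoint only for a segment heading west, so the same longitude is treated as -180 when the line is moving east across the dateline.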
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index 269bed2ddc8..07d22ddb663 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -21,8 +21,10 @@ package org.elasticsearch.index.reindex; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -35,6 +37,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; @@ -356,6 +359,7 @@ public abstract class ScrollableHitSource { */ public static class SearchFailure implements Writeable, ToXContentObject { private final Throwable reason; + private final RestStatus status; @Nullable private final String index; @Nullable @@ -367,12 +371,19 @@ public abstract class ScrollableHitSource { public static final String SHARD_FIELD = "shard"; public static final String NODE_FIELD = "node"; public static final String REASON_FIELD = "reason"; + public static final String STATUS_FIELD = BulkItemResponse.Failure.STATUS_FIELD; public SearchFailure(Throwable reason, @Nullable String index, @Nullable Integer shardId, @Nullable String nodeId) { + this(reason, index, shardId, nodeId, ExceptionsHelper.status(reason)); + } + + public SearchFailure(Throwable reason, @Nullable String index, @Nullable Integer shardId, @Nullable String nodeId, + RestStatus status) { this.index = index; this.shardId = shardId; this.reason = requireNonNull(reason, "reason cannot be null"); this.nodeId = nodeId; + this.status = status; } /** @@ -390,6 +401,7 @@ public abstract class ScrollableHitSource { index = in.readOptionalString(); shardId = in.readOptionalVInt(); nodeId = in.readOptionalString(); + status = ExceptionsHelper.status(reason); } @Override @@ -408,6 +420,10 @@ public abstract class ScrollableHitSource { return shardId; } + public RestStatus getStatus() { + return this.status; + } + public Throwable getReason() { return reason; } @@ -429,6 +445,7 @@ public abstract class ScrollableHitSource { if (nodeId != null) { builder.field(NODE_FIELD, nodeId); } + builder.field(STATUS_FIELD, status.getStatus()); builder.field(REASON_FIELD); { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index e5cfade7bc9..08266f2bc6f 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1678,6 +1678,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * required generation */ public void trimUnreferencedReaders() throws IOException { + // move most of the data to disk to
reduce the time the lock is held + sync(); try (ReleasableLock ignored = writeLock.acquire()) { if (closed.get()) { // we're shutdown potentially on some tragic event, don't delete anything @@ -1705,6 +1707,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // We now update the checkpoint to ignore the file we are going to remove. // Note that there is a provision in recoverFromFiles to allow for the case where we synced the checkpoint // but crashed before we could delete the file. + // sync at once to make sure that there's at most one unreferenced generation. current.sync(); deleteReaderFiles(reader); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 0c976cabac9..439565ced36 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -409,10 +409,7 @@ public class RecoverySourceHandler { // TODO: We shouldn't use the generic thread pool here as we already execute this from the generic pool. // While practically unlikely at a min pool size of 128 we could technically block the whole pool by waiting on futures // below and thus make it impossible for the store release to execute which in turn would block the futures forever - threadPool.generic().execute(ActionRunnable.wrap(future, l -> { - store.decRef(); - l.onResponse(null); - })); + threadPool.generic().execute(ActionRunnable.run(future, store::decRef)); FutureUtils.get(future); }); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 4f83bdc0192..b81e7f1ea33 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -490,14 +490,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp }, listener::onFailure), 2); final Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); - executor.execute(ActionRunnable.wrap(groupedListener, l -> { + executor.execute(ActionRunnable.supply(groupedListener, () -> { List deletedBlobs = cleanupStaleRootFiles(staleRootBlobs(newRepoData, rootBlobs.keySet())); - l.onResponse( - new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum())); + return new DeleteResult(deletedBlobs.size(), deletedBlobs.stream().mapToLong(name -> rootBlobs.get(name).length()).sum()); })); final Set survivingIndexIds = newRepoData.getIndices().values().stream().map(IndexId::getId).collect(Collectors.toSet()); - executor.execute(ActionRunnable.wrap(groupedListener, l -> l.onResponse(cleanupStaleIndices(foundIndices, survivingIndexIds)))); + executor.execute(ActionRunnable.supply(groupedListener, () -> cleanupStaleIndices(foundIndices, survivingIndexIds))); } /** @@ -712,26 +711,22 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp // that decrements the generation it points at // Write Global MetaData - executor.execute(ActionRunnable.wrap(allMetaListener, l -> { - globalMetaDataFormat.write(clusterMetaData, blobContainer(), snapshotId.getUUID(), false); - l.onResponse(null); - })); + 
executor.execute(ActionRunnable.run(allMetaListener, + () -> globalMetaDataFormat.write(clusterMetaData, blobContainer(), snapshotId.getUUID(), false))); // write the index metadata for each index in the snapshot for (IndexId index : indices) { - executor.execute(ActionRunnable.wrap(allMetaListener, l -> { - indexMetaDataFormat.write(clusterMetaData.index(index.getName()), indexContainer(index), snapshotId.getUUID(), false); - l.onResponse(null); - })); + executor.execute(ActionRunnable.run(allMetaListener, () -> + indexMetaDataFormat.write(clusterMetaData.index(index.getName()), indexContainer(index), snapshotId.getUUID(), false))); } - executor.execute(ActionRunnable.wrap(allMetaListener, afterMetaListener -> { + executor.execute(ActionRunnable.supply(allMetaListener, () -> { final SnapshotInfo snapshotInfo = new SnapshotInfo(snapshotId, indices.stream().map(IndexId::getName).collect(Collectors.toList()), startTime, failure, threadPool.absoluteTimeInMillis(), totalShards, shardFailures, includeGlobalState, userMetadata); snapshotFormat.write(snapshotInfo, blobContainer(), snapshotId.getUUID(), false); - afterMetaListener.onResponse(snapshotInfo); + return snapshotInfo; })); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index f3daa34eb4f..0b352028a75 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -134,7 +134,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv /** * Enables low-level, frequent search cancellation checks. Enabling low-level checks will make long running searches to react - * to the cancellation request faster. It will produce more cancellation checks but benchmarking has shown these did not + * to the cancellation request faster. It will produce more cancellation checks but benchmarking has shown these did not * noticeably slow down searches. */ public static final Setting LOW_LEVEL_CANCELLATION_SETTING = @@ -341,7 +341,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } private void runAsync(long id, Supplier executable, ActionListener listener) { - getExecutor(id).execute(ActionRunnable.wrap(listener, l -> l.onResponse(executable.get()))); + getExecutor(id).execute(ActionRunnable.supply(listener, executable::get)); } private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTask task) throws Exception { @@ -1053,7 +1053,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Executor executor = getExecutor(shard); ActionListener actionListener = ActionListener.wrap(r -> // now we need to check if there is a pending refresh and register - shard.awaitShardSearchActive(b -> executor.execute(ActionRunnable.wrap(listener, l -> l.onResponse(request)))), + shard.awaitShardSearchActive(b -> executor.execute(ActionRunnable.supply(listener, () -> request))), listener::onFailure); // we also do rewrite on the coordinating node (TransportSearchService) but we also need to do it here for BWC as well as // AliasFilters that might need to be rewritten. 
These are edge-cases but we are very efficient doing the rewrite here so it's not diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java index 2e177e59e06..85d66ec30a0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java @@ -45,7 +45,7 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase { @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { - if (context.innerHits().isEmpty()) { + if ((context.innerHits() != null && context.innerHits().size() > 0) == false) { return; } @@ -72,6 +72,10 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase { } innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); innerHits.setUid(new Uid(hit.getType(), hit.getId())); + innerHits.lookup().source().setSource(context.lookup().source().internalSourceRef()); + if (context.lookup().source().source() != null) { + innerHits.lookup().source().setSource(context.lookup().source().source()); + } fetchPhase.execute(innerHits); FetchSearchResult fetchResult = innerHits.fetchResult(); SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index d9cc0d0be06..f011293c68d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -37,11 +37,11 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions; -import org.elasticsearch.search.internal.SearchContext; import java.text.BreakIterator; import java.util.Collections; @@ -69,7 +69,7 @@ public class FastVectorHighlighter implements Highlighter { @Override public HighlightField highlight(HighlighterContext highlighterContext) { SearchContextHighlight.Field field = highlighterContext.field; - SearchContext context = highlighterContext.context; + QueryShardContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; MappedFieldType fieldType = highlighterContext.fieldType; @@ -93,7 +93,7 @@ public class FastVectorHighlighter implements Highlighter { BaseFragmentsBuilder fragmentsBuilder; final BoundaryScanner boundaryScanner = getBoundaryScanner(field); - boolean forceSource = context.highlight().forceSource(field); + boolean forceSource = highlighterContext.highlight.forceSource(field); if (field.fieldOptions().numberOfFragments() == 0) { fragListBuilder = new SingleFragListBuilder(); @@ -203,7 +203,7 @@ public class FastVectorHighlighter implements Highlighter { return null; } catch (Exception
e) { - throw new FetchPhaseExecutionException(context.shardTarget(), + throw new FetchPhaseExecutionException(highlighterContext.shardTarget, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index b4cbd031167..12d251a954a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -25,6 +25,8 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -45,17 +47,25 @@ public class HighlightPhase implements FetchSubPhase { if (context.highlight() == null) { return; } + hitExecute(context.shardTarget(), context.getQueryShardContext(), context.parsedQuery().query(), context.highlight(), hitContext); + } + + public void hitExecute(SearchShardTarget shardTarget, + QueryShardContext context, + Query query, + SearchContextHighlight highlight, + HitContext hitContext) { Map highlightFields = new HashMap<>(); - for (SearchContextHighlight.Field field : context.highlight().fields()) { + for (SearchContextHighlight.Field field : highlight.fields()) { Collection fieldNamesToHighlight; if (Regex.isSimpleMatchPattern(field.field())) { - fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field()); + fieldNamesToHighlight = context.getMapperService().simpleMatchToFullName(field.field()); } else { fieldNamesToHighlight = Collections.singletonList(field.field()); } - if (context.highlight().forceSource(field)) { - SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().getType()).sourceMapper(); + if (highlight.forceSource(field)) { + SourceFieldMapper sourceFieldMapper = context.getMapperService().documentMapper(hitContext.hit().getType()).sourceMapper(); if (!sourceFieldMapper.enabled()) { throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().getType() + "] has disabled _source"); @@ -64,7 +74,7 @@ public class HighlightPhase implements FetchSubPhase { boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { - MappedFieldType fieldType = context.mapperService().fullName(fieldName); + MappedFieldType fieldType = context.getMapperService().fullName(fieldName); if (fieldType == null) { continue; } @@ -90,15 +100,15 @@ public class HighlightPhase implements FetchSubPhase { Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { throw new IllegalArgumentException("unknown highlighter type [" + highlighterType - + "] for the field [" + fieldName + "]"); + + "] for the field [" + fieldName + "]"); } Query highlightQuery = field.fieldOptions().highlightQuery(); if (highlightQuery == null) { - highlightQuery = context.parsedQuery().query(); + highlightQuery = query; } HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(), - 
field, fieldType, context, hitContext, highlightQuery); + field, fieldType, shardTarget, context, highlight, hitContext, highlightQuery); if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) { // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index e9ec29cef4a..3651c21e439 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -23,8 +23,8 @@ import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -46,14 +46,13 @@ public final class HighlightUtils { /** * Load field values for highlighting. */ - public static List loadFieldValues(SearchContextHighlight.Field field, - MappedFieldType fieldType, - SearchContext searchContext, - FetchSubPhase.HitContext hitContext) throws IOException { + public static List loadFieldValues(MappedFieldType fieldType, + QueryShardContext context, + FetchSubPhase.HitContext hitContext, + boolean forceSource) throws IOException { //percolator needs to always load from source, thus it sets the global force source to true - boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; - if (!forceSource && fieldType.stored()) { + if (forceSource == false && fieldType.stored()) { CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); textsToHighlight = fieldVisitor.fields().get(fieldType.name()); @@ -62,7 +61,7 @@ public final class HighlightUtils { textsToHighlight = Collections.emptyList(); } } else { - SourceLookup sourceLookup = searchContext.lookup().source(); + SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); textsToHighlight = sourceLookup.extractRawValues(fieldType.name()); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java index 3efa19539d1..aedb6b3999c 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java @@ -20,28 +20,35 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.internal.SearchContext; public class HighlighterContext { public final String fieldName; public final SearchContextHighlight.Field 
field; public final MappedFieldType fieldType; - public final SearchContext context; + public final SearchShardTarget shardTarget; + public final QueryShardContext context; + public final SearchContextHighlight highlight; public final FetchSubPhase.HitContext hitContext; public final Query query; public HighlighterContext(String fieldName, SearchContextHighlight.Field field, MappedFieldType fieldType, - SearchContext context, + SearchShardTarget shardTarget, + QueryShardContext context, + SearchContextHighlight highlight, FetchSubPhase.HitContext hitContext, Query query) { this.fieldName = fieldName; this.field = field; this.fieldType = fieldType; + this.shardTarget = shardTarget; this.context = context; + this.highlight = highlight; this.hitContext = hitContext; this.query = query; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 09ec505ca1b..62e58750f09 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -37,9 +37,9 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; @@ -56,7 +56,7 @@ public class PlainHighlighter implements Highlighter { @Override public HighlightField highlight(HighlighterContext highlighterContext) { SearchContextHighlight.Field field = highlighterContext.field; - SearchContext context = highlighterContext.context; + QueryShardContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; MappedFieldType fieldType = highlighterContext.fieldType; @@ -101,18 +101,19 @@ public class PlainHighlighter implements Highlighter { int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 
1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); - final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); + Analyzer analyzer = context.getMapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); + final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset(); try { - textsToHighlight = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); + textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context, hitContext, + highlighterContext.highlight.forceSource(field)); for (Object textToHighlight : textsToHighlight) { String text = convertFieldValue(fieldType, textToHighlight); if (text.length() > maxAnalyzedOffset) { throw new IllegalArgumentException( "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() + - "] doc of [" + context.indexShard().shardId().getIndexName() + "] index " + + "] doc of [" + context.index().getName() + "] index " + "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " + "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() + "] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " + @@ -139,7 +140,7 @@ public class PlainHighlighter implements Highlighter { // the plain highlighter will parse the source and try to analyze it. return null; } else { - throw new FetchPhaseExecutionException(context.shardTarget(), + throw new FetchPhaseExecutionException(highlighterContext.shardTarget, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } @@ -179,7 +180,7 @@ public class PlainHighlighter implements Highlighter { try { end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); } catch (Exception e) { - throw new FetchPhaseExecutionException(context.shardTarget(), + throw new FetchPhaseExecutionException(highlighterContext.shardTarget, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } if (end > 0) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java index 4ce0c6a47a3..78a15659c3c 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java @@ -27,7 +27,7 @@ import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -37,22 +37,22 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder private final MappedFieldType fieldType; - private final SearchContext searchContext; + private final QueryShardContext context; public 
SourceScoreOrderFragmentsBuilder(MappedFieldType fieldType, - SearchContext searchContext, + QueryShardContext context, String[] preTags, String[] postTags, BoundaryScanner boundaryScanner) { super(preTags, postTags, boundaryScanner); this.fieldType = fieldType; - this.searchContext = searchContext; + this.context = context; } @Override protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException { // we know its low level reader, and matching docId, since that's how we call the highlighter with - SourceLookup sourceLookup = searchContext.lookup().source(); + SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); List values = sourceLookup.extractRawValues(fieldType.name()); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java index a7eb65de3b6..8d61fb431cf 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -32,15 +32,15 @@ import java.util.List; public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { - private final SearchContext searchContext; + private final QueryShardContext context; public SourceSimpleFragmentsBuilder(MappedFieldType fieldType, - SearchContext searchContext, + QueryShardContext context, String[] preTags, String[] postTags, BoundaryScanner boundaryScanner) { super(fieldType, preTags, postTags, boundaryScanner); - this.searchContext = searchContext; + this.context = context; } public static final Field[] EMPTY_FIELDS = new Field[0]; @@ -48,7 +48,7 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { @Override protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException { // we know its low level reader, and matching docId, since that's how we call the highlighter with - SourceLookup sourceLookup = searchContext.lookup().source(); + SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); List values = sourceLookup.extractRawValues(fieldType.name()); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 67db3f287b3..35064def777 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -37,10 +37,10 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.text.BreakIterator; @@ -61,18 +61,19 @@ public class UnifiedHighlighter implements Highlighter { public HighlightField highlight(HighlighterContext highlighterContext) { MappedFieldType fieldType = highlighterContext.fieldType; SearchContextHighlight.Field field = highlighterContext.field; - SearchContext context = highlighterContext.context; + QueryShardContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; - final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); + final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset(); List snippets = new ArrayList<>(); int numberOfFragments; try { - final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), + final Analyzer analyzer = getAnalyzer(context.getMapperService().documentMapper(hitContext.hit().getType()), hitContext); - List fieldValues = loadFieldValues(fieldType, field, context, hitContext); + List fieldValues = loadFieldValues(fieldType, field, context, hitContext, + highlighterContext.highlight.forceSource(field)); if (fieldValues.size() == 0) { return null; } @@ -84,7 +85,7 @@ public class UnifiedHighlighter implements Highlighter { if ((offsetSource == OffsetSource.ANALYSIS) && (fieldValue.length() > maxAnalyzedOffset)) { throw new IllegalArgumentException( "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() + - "] doc of [" + context.indexShard().shardId().getIndexName() + "] index " + "has exceeded [" + + "] doc of [" + context.index().getName() + "] index " + "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " + "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() + "] index level setting. 
" + "For large texts, indexing with offsets or term vectors is recommended!"); @@ -123,7 +124,7 @@ public class UnifiedHighlighter implements Highlighter { } } } catch (IOException e) { - throw new FetchPhaseExecutionException(context.shardTarget(), + throw new FetchPhaseExecutionException(highlighterContext.shardTarget, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } @@ -153,10 +154,13 @@ public class UnifiedHighlighter implements Highlighter { protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) { return docMapper.mappers().indexAnalyzer(); } - - protected List loadFieldValues(MappedFieldType fieldType, SearchContextHighlight.Field field, SearchContext context, - FetchSubPhase.HitContext hitContext) throws IOException { - List fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); + + protected List loadFieldValues(MappedFieldType fieldType, + SearchContextHighlight.Field field, + QueryShardContext context, + FetchSubPhase.HitContext hitContext, + boolean forceSource) throws IOException { + List fieldValues = HighlightUtils.loadFieldValues(fieldType, context, hitContext, forceSource); fieldValues = fieldValues.stream() .map((s) -> convertFieldValue(fieldType, s)) .collect(Collectors.toList()); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index 73e4c252b2c..884e8dee735 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; +import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; @@ -381,4 +382,9 @@ public class SubSearchContext extends FilteredSearchContext { public void innerHits(Map innerHits) { this.innerHits = innerHits; } + + @Override + public SearchLookup lookup() { + return queryShardContext.lookup(); + } } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 165beeb510e..2583e1a76c4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -163,7 +163,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { new ArraySearchPhaseResults<>(10), null, false, new AtomicLong()); String scrollId = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); - SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId); + SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId, action.buildShardFailures()); assertEquals(scrollId, searchResponse.getScrollId()); assertSame(searchResponse.getAggregations(), internalSearchResponse.aggregations()); assertSame(searchResponse.getSuggest(), internalSearchResponse.suggest()); @@ -179,7 +179,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { new IllegalArgumentException()); String scrollId = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); - SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId); + SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId, action.buildShardFailures()); assertEquals(scrollId, searchResponse.getScrollId()); assertSame(searchResponse.getAggregations(), internalSearchResponse.aggregations()); assertSame(searchResponse.getSuggest(), internalSearchResponse.suggest()); @@ -187,7 +187,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { assertSame(searchResponse.getHits(), internalSearchResponse.hits()); } - public void testBuildSearchResponseDisallowPartialFailures() { + public void testSendSearchResponseDisallowPartialFailures() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); AtomicReference exception = new AtomicReference<>(); ActionListener listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set); @@ -203,7 +203,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias, OriginalIndices.NONE), new IllegalArgumentException()); } - action.buildSearchResponse(InternalSearchResponse.empty(), randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10)); + action.sendSearchResponse(InternalSearchResponse.empty(), randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10)); assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class)); SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException)exception.get(); assertEquals(0, searchPhaseExecutionException.getSuppressed().length); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 994dffc0315..fc40c120735 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -148,7 +148,7 @@ public class SearchAsyncActionTests extends ESTestCase { asyncAction.start(); latch.await(); assertTrue(searchPhaseDidRun.get()); - SearchResponse searchResponse = asyncAction.buildSearchResponse(null, null); + SearchResponse searchResponse = asyncAction.buildSearchResponse(null, null, asyncAction.buildShardFailures()); assertEquals(shardsIter.size() - numSkipped, numRequests.get()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(numSkipped, searchResponse.getSkippedShards()); diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java index 47b02ea6981..24b135be5c6 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.geo; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; @@ -32,7 +33,6 @@ import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.MultiPolygon; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.GeoShapeIndexer; import org.elasticsearch.test.ESTestCase; @@ -41,12 +41,11 @@ import java.text.ParseException; import java.util.Arrays; import java.util.Collections; +import static org.hamcrest.Matchers.instanceOf; + public class GeometryIndexerTests extends ESTestCase { GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test"); - private static final WellKnownText WKT = new WellKnownText(true, geometry -> { - }); - public void testCircle() { UnsupportedOperationException ex = @@ -105,10 +104,96 @@ public class GeometryIndexerTests extends ESTestCase { new Line(new double[]{160, 180}, new double[]{0, 5}), new Line(new double[]{-180, -160, -180}, new double[]{5, 10, 15}), new Line(new double[]{180, 160}, new double[]{15, 20}) - ) - ); + )); assertEquals(indexed, indexer.prepareForIndexing(line)); + + line = new Line(new double[]{0, 720}, new double[]{0, 20}); + indexed = new MultiLine(Arrays.asList( + new Line(new double[]{0, 180}, new double[]{0, 5}), + new Line(new double[]{-180, 180}, new double[]{5, 15}), + new Line(new double[]{-180, 0}, new double[]{15, 20}) + )); + + assertEquals(indexed, indexer.prepareForIndexing(line)); + + line = new Line(new double[]{160, 180, 180, 200, 160, 140}, new double[]{0, 10, 20, 30, 30, 40}); + indexed = new 
MultiLine(Arrays.asList( + new Line(new double[]{160, 180}, new double[]{0, 10}), + new Line(new double[]{-180, -180, -160, -180}, new double[]{10, 20, 30, 30}), + new Line(new double[]{180, 160, 140}, new double[]{30, 30, 40}) + )); + + assertEquals(indexed, indexer.prepareForIndexing(line)); + + line = new Line(new double[]{-70, 180, 900}, new double[]{0, 0, 4}); + + indexed = new MultiLine(Arrays.asList( + new Line(new double[]{-70, 180}, new double[]{0, 0}), + new Line(new double[]{-180, 180}, new double[]{0, 2}), + new Line(new double[]{-180, 180}, new double[]{2, 4}) + )); + + assertEquals(indexed, indexer.prepareForIndexing(line)); + + line = new Line(new double[]{160, 200, 160, 200, 160, 200}, new double[]{0, 10, 20, 30, 40, 50}); + + indexed = new MultiLine(Arrays.asList( + new Line(new double[]{160, 180}, new double[]{0, 5}), + new Line(new double[]{-180, -160, -180}, new double[]{5, 10, 15}), + new Line(new double[]{180, 160, 180}, new double[]{15, 20, 25}), + new Line(new double[]{-180, -160, -180}, new double[]{25, 30, 35}), + new Line(new double[]{180, 160, 180}, new double[]{35, 40, 45}), + new Line(new double[]{-180, -160}, new double[]{45, 50}) + )); + + assertEquals(indexed, indexer.prepareForIndexing(line)); + } + + /** + * Returns the sum of Euclidean distances between consecutive points in the linestring. + */ + public double length(Line line) { + double distance = 0; + for (int i = 1; i < line.length(); i++) { + distance += Math.sqrt((line.getLat(i) - line.getLat(i - 1)) * (line.getLat(i) - line.getLat(i - 1)) + + (line.getLon(i) - line.getLon(i - 1)) * (line.getLon(i) - line.getLon(i - 1))); + } + return distance; + } + + /** + * A simple test that generates random lines crossing the anti-meridian and checks that the decomposed segments of this line + * have the same total length (measured using Euclidean distances between neighboring points) as the original line.
+ */ + public void testRandomLine() { + int size = randomIntBetween(2, 20); + int shift = randomIntBetween(-2, 2); + double[] originalLats = new double[size]; + double[] originalLons = new double[size]; + + for (int i = 0; i < size; i++) { + originalLats[i] = GeometryTestUtils.randomLat(); + originalLons[i] = GeometryTestUtils.randomLon() + shift * 360; + if (randomInt(3) == 0) { + shift += randomFrom(-2, -1, 1, 2); + } + } + Line original = new Line(originalLons, originalLats); + + Geometry decomposed = indexer.prepareForIndexing(original); + double decomposedLength = 0; + if (decomposed instanceof Line) { + decomposedLength = length((Line) decomposed); + } else { + assertThat(decomposed, instanceOf(MultiLine.class)); + MultiLine lines = (MultiLine) decomposed; + for (int i = 0; i < lines.size(); i++) { + decomposedLength += length(lines.get(i)); + } + } + + assertEquals("Different Lengths between " + original + " and " + decomposed, length(original), decomposedLength, 0.001); } public void testMultiLine() { @@ -137,6 +222,15 @@ public class GeometryIndexerTests extends ESTestCase { point = new Point(2, 1, 3); assertEquals(indexed, indexer.prepareForIndexing(point)); + + point = new Point(362, 1); + assertEquals(indexed, indexer.prepareForIndexing(point)); + + point = new Point(-178, 179); + assertEquals(indexed, indexer.prepareForIndexing(point)); + + point = new Point(180, 180); + assertEquals(new Point(0, 0), indexer.prepareForIndexing(point)); } public void testMultiPoint() { @@ -254,5 +348,4 @@ public class GeometryIndexerTests extends ESTestCase { return geometryParser.parse(parser); } } - } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java index 7822244b9ce..8791c11bf27 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java @@ -20,14 +20,16 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.index.reindex.BulkByScrollTask.Status; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.util.HashMap; @@ -38,12 +40,12 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.apache.lucene.util.TestUtil.randomSimpleString; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -import static org.hamcrest.Matchers.containsString; public class BulkByScrollResponseTests extends AbstractXContentTestCase { private boolean includeUpdated; private boolean includeCreated; + private boolean testExceptions = randomBoolean(); public void testRountTrip() throws IOException { BulkByScrollResponse response = new BulkByScrollResponse(timeValueMillis(randomNonNegativeLong()), @@ -76,7 +78,9 @@ public class BulkByScrollResponseTests extends AbstractXContentTestCase 
future = PlainActionFuture.newFuture(); - repo.threadPool().generic().execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - repo.blobStore().blobContainer(path).delete(); - future.onResponse(null); - } - }); + repo.threadPool().generic().execute(ActionRunnable.run(future, () -> repo.blobStore().blobContainer(path).delete())); future.actionGet(); final BlobPath parent = path.parent(); if (parent == null) { @@ -146,19 +140,15 @@ public abstract class AbstractThirdPartyRepositoryTestCase extends ESSingleNodeT final PlainActionFuture future = PlainActionFuture.newFuture(); final Executor genericExec = repo.threadPool().generic(); final int testBlobLen = randomIntBetween(1, 100); - genericExec.execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repo.blobStore(); - blobStore.blobContainer(repo.basePath().add("foo")) - .writeBlob("nested-blob", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); - blobStore.blobContainer(repo.basePath().add("foo").add("nested")) - .writeBlob("bar", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); - blobStore.blobContainer(repo.basePath().add("foo").add("nested2")) - .writeBlob("blub", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); - future.onResponse(null); - } - }); + genericExec.execute(ActionRunnable.run(future, () -> { + final BlobStore blobStore = repo.blobStore(); + blobStore.blobContainer(repo.basePath().add("foo")) + .writeBlob("nested-blob", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + blobStore.blobContainer(repo.basePath().add("foo").add("nested")) + .writeBlob("bar", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + blobStore.blobContainer(repo.basePath().add("foo").add("nested2")) + .writeBlob("blub", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); + })); future.actionGet(); assertChildren(repo.basePath(), Collections.singleton("foo")); assertBlobsByPrefix(repo.basePath(), "fo", Collections.emptyMap()); @@ -243,37 +233,27 @@ public abstract class AbstractThirdPartyRepositoryTestCase extends ESSingleNodeT private void createDanglingIndex(final BlobStoreRepository repo, final Executor genericExec) throws Exception { final PlainActionFuture future = PlainActionFuture.newFuture(); - genericExec.execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repo.blobStore(); - blobStore.blobContainer(repo.basePath().add("indices").add("foo")) - .writeBlob("bar", new ByteArrayInputStream(new byte[3]), 3, false); - for (String prefix : Arrays.asList("snap-", "meta-")) { - blobStore.blobContainer(repo.basePath()) - .writeBlob(prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); - } - future.onResponse(null); + genericExec.execute(ActionRunnable.run(future, () -> { + final BlobStore blobStore = repo.blobStore(); + blobStore.blobContainer(repo.basePath().add("indices").add("foo")) + .writeBlob("bar", new ByteArrayInputStream(new byte[3]), 3, false); + for (String prefix : Arrays.asList("snap-", "meta-")) { + blobStore.blobContainer(repo.basePath()).writeBlob(prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); } - }); + })); future.actionGet(); assertTrue(assertCorruptionVisible(repo, genericExec)); } protected boolean 
assertCorruptionVisible(BlobStoreRepository repo, Executor executor) throws Exception { final PlainActionFuture future = PlainActionFuture.newFuture(); - executor.execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repo.blobStore(); - future.onResponse( - blobStore.blobContainer(repo.basePath().add("indices")).children().containsKey("foo") - && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath().add("indices").add("foo")), "bar") - && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath()), "meta-foo.dat") - && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath()), "snap-foo.dat") - ); - } - }); + executor.execute(ActionRunnable.supply(future, () -> { + final BlobStore blobStore = repo.blobStore(); + return blobStore.blobContainer(repo.basePath().add("indices")).children().containsKey("foo") + && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath().add("indices").add("foo")), "bar") + && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath()), "meta-foo.dat") + && BlobStoreTestUtil.blobExists(blobStore.blobContainer(repo.basePath()), "snap-foo.dat"); + })); return future.actionGet(); } @@ -298,13 +278,8 @@ public abstract class AbstractThirdPartyRepositoryTestCase extends ESSingleNodeT private Set listChildren(BlobPath path) { final PlainActionFuture> future = PlainActionFuture.newFuture(); final BlobStoreRepository repository = getRepository(); - repository.threadPool().generic().execute(new ActionRunnable>(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repository.blobStore(); - future.onResponse(blobStore.blobContainer(path).children().keySet()); - } - }); + repository.threadPool().generic().execute( + ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).children().keySet())); return future.actionGet(); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index bbfaa0d9228..0438d940bbd 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -89,28 +89,24 @@ public final class BlobStoreTestUtil { */ public static void assertConsistency(BlobStoreRepository repository, Executor executor) { final PlainActionFuture listener = PlainActionFuture.newFuture(); - executor.execute(new ActionRunnable(listener) { - @Override - protected void doRun() throws Exception { - final BlobContainer blobContainer = repository.blobContainer(); - final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob("index.latest"))) { - latestGen = inputStream.readLong(); - } catch (NoSuchFileException e) { - throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); - } - assertIndexGenerations(blobContainer, latestGen); - final RepositoryData repositoryData; - try (InputStream blob = blobContainer.readBlob("index-" + latestGen); - XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, blob)) { - repositoryData = RepositoryData.snapshotsFromXContent(parser, latestGen); - } - assertIndexUUIDs(blobContainer, repositoryData); - assertSnapshotUUIDs(repository, 
repositoryData); - listener.onResponse(null); + executor.execute(ActionRunnable.run(listener, () -> { + final BlobContainer blobContainer = repository.blobContainer(); + final long latestGen; + try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob("index.latest"))) { + latestGen = inputStream.readLong(); + } catch (NoSuchFileException e) { + throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); } - }); + assertIndexGenerations(blobContainer, latestGen); + final RepositoryData repositoryData; + try (InputStream blob = blobContainer.readBlob("index-" + latestGen); + XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, blob)) { + repositoryData = RepositoryData.snapshotsFromXContent(parser, latestGen); + } + assertIndexUUIDs(blobContainer, repositoryData); + assertSnapshotUUIDs(repository, repositoryData); + })); listener.actionGet(TimeValue.timeValueMinutes(1L)); } @@ -186,60 +182,46 @@ public final class BlobStoreTestUtil { throws InterruptedException, ExecutionException { final PlainActionFuture future = PlainActionFuture.newFuture(); final AtomicLong totalSize = new AtomicLong(); - repository.threadPool().generic().execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repository.blobStore(); - BlobContainer container = - blobStore.blobContainer(repository.basePath().add("indices").add(name)); - for (String file : files) { - int size = randomIntBetween(0, 10); - totalSize.addAndGet(size); - container.writeBlob(file, new ByteArrayInputStream(new byte[size]), size, false); - } - future.onResponse(null); + repository.threadPool().generic().execute(ActionRunnable.run(future, () -> { + final BlobStore blobStore = repository.blobStore(); + BlobContainer container = + blobStore.blobContainer(repository.basePath().add("indices").add(name)); + for (String file : files) { + int size = randomIntBetween(0, 10); + totalSize.addAndGet(size); + container.writeBlob(file, new ByteArrayInputStream(new byte[size]), size, false); } - }); + })); future.get(); return totalSize.get(); } public static void assertCorruptionVisible(BlobStoreRepository repository, Map> indexToFiles) { final PlainActionFuture future = PlainActionFuture.newFuture(); - repository.threadPool().generic().execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - final BlobStore blobStore = repository.blobStore(); - for (String index : indexToFiles.keySet()) { - if (blobStore.blobContainer(repository.basePath().add("indices")) - .children().containsKey(index) == false) { - future.onResponse(false); - return; - } - for (String file : indexToFiles.get(index)) { - try (InputStream ignored = - blobStore.blobContainer(repository.basePath().add("indices").add(index)).readBlob(file)) { - } catch (NoSuchFileException e) { - future.onResponse(false); - return; - } + repository.threadPool().generic().execute(ActionRunnable.supply(future, () -> { + final BlobStore blobStore = repository.blobStore(); + for (String index : indexToFiles.keySet()) { + if (blobStore.blobContainer(repository.basePath().add("indices")) + .children().containsKey(index) == false) { + return false; + } + for (String file : indexToFiles.get(index)) { + try (InputStream ignored = + blobStore.blobContainer(repository.basePath().add("indices").add(index)).readBlob(file)) { + } catch (NoSuchFileException e) { + return false; } } - 
future.onResponse(true);
             }
-        });
+            return true;
+        }));
         assertTrue(future.actionGet());
     }

     public static void assertBlobsByPrefix(BlobStoreRepository repository, BlobPath path, String prefix, Map<String, BlobMetaData> blobs) {
         final PlainActionFuture<Map<String, BlobMetaData>> future = PlainActionFuture.newFuture();
-        repository.threadPool().generic().execute(new ActionRunnable<Map<String, BlobMetaData>>(future) {
-            @Override
-            protected void doRun() throws Exception {
-                final BlobStore blobStore = repository.blobStore();
-                future.onResponse(blobStore.blobContainer(path).listBlobsByPrefix(prefix));
-            }
-        });
+        repository.threadPool().generic().execute(
+            ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).listBlobsByPrefix(prefix)));
         Map<String, BlobMetaData> foundBlobs = future.actionGet();
         if (blobs.isEmpty()) {
             assertThat(foundBlobs.keySet(), empty());
diff --git a/x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc b/x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc
index f809c51ee6a..cedffe7a6f9 100644
--- a/x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc
+++ b/x-pack/docs/en/rest-api/security/get-builtin-privileges.asciidoc
@@ -81,11 +81,13 @@ A successful call returns an object with "cluster" and "index" fields.
         "manage_security",
         "manage_slm",
         "manage_token",
+        "manage_transform",
         "manage_watcher",
         "monitor",
         "monitor_data_frame_transforms",
         "monitor_ml",
         "monitor_rollup",
+        "monitor_transform",
         "monitor_watcher",
         "none",
         "read_ccr",
diff --git a/x-pack/docs/en/security/authorization/privileges.asciidoc b/x-pack/docs/en/security/authorization/privileges.asciidoc
index 5da42e32072..6e3072f0470 100644
--- a/x-pack/docs/en/security/authorization/privileges.asciidoc
+++ b/x-pack/docs/en/security/authorization/privileges.asciidoc
@@ -153,8 +153,21 @@ action.
 +
 --
 NOTE: This privilege does not restrict the index operation to the creation
-of documents but instead restricts API use to the index API. The index API allows a user
-to overwrite a previously indexed document.
+of documents but instead restricts API use to the index API. The index API
+allows a user to overwrite a previously indexed document. See the `create_doc`
+privilege for an alternative.
+
+--
+
+`create_doc`::
+Privilege to index documents. Also grants access to the update mapping action.
+However, it does not enable a user to update existing documents.
++
+--
+NOTE: When indexing documents with an external `_id`, either via the index API or
+the bulk API, the request must set `op_type` to `create`. If `_id`s are
+generated automatically, the authorization happens as if the `op_type` is set to
+`create`.
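+
+As an illustration only (the index name, id, and field below are hypothetical),
+here is a minimal sketch, using the Java high-level REST client, of an indexing
+request that a user holding only the `create_doc` privilege could issue:
+
+[source,java]
+----
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.index.IndexRequest;
+
+// The document carries an external _id, so op_type must be create;
+// a plain index request (which may overwrite) would be rejected.
+IndexRequest request = new IndexRequest("my-index")
+    .id("user-provided-id")
+    .opType(DocWriteRequest.OpType.CREATE)
+    .source("message", "hello");
+----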
-- diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 13dc84b8582..cca60579ae6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -596,6 +596,9 @@ public class AutoFollowCoordinator extends AbstractLifecycleComponent implements List followedIndexUUIDs) { List leaderIndicesToFollow = new ArrayList<>(); for (IndexMetaData leaderIndexMetaData : remoteClusterState.getMetaData()) { + if (leaderIndexMetaData.getState() != IndexMetaData.State.OPEN) { + continue; + } if (autoFollowPattern.match(leaderIndexMetaData.getIndex().getName())) { IndexRoutingTable indexRoutingTable = remoteClusterState.routingTable().index(leaderIndexMetaData.getIndex()); if (indexRoutingTable != null && diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 75606699b11..e3edf0489d7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -5,8 +5,10 @@ */ package org.elasticsearch.xpack.ccr.action; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -18,11 +20,13 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -44,10 +48,12 @@ import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; @@ -57,6 +63,7 @@ import static org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollo import static org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower.recordLeaderIndexAsFollowFunction; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; import static 
org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -416,6 +423,26 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(result.get(1).getName(), equalTo("index2")); } + public void testGetLeaderIndicesToFollowWithClosedIndices() { + final AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("*"), + null, null, null, null, null, null, null, null, null, null, null); + + // index is opened + ClusterState remoteState = ClusterStateCreationUtils.stateWithActivePrimary("test-index", true, randomIntBetween(1, 3), 0); + List result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, remoteState, Collections.emptyList()); + assertThat(result.size(), equalTo(1)); + assertThat(result, hasItem(remoteState.metaData().index("test-index").getIndex())); + + // index is closed + remoteState = ClusterState.builder(remoteState) + .metaData(MetaData.builder(remoteState.metaData()) + .put(IndexMetaData.builder(remoteState.metaData().index("test-index")).state(IndexMetaData.State.CLOSE).build(), true) + .build()) + .build(); + result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, remoteState, Collections.emptyList()); + assertThat(result.size(), equalTo(0)); + } + public void testRecordLeaderIndexAsFollowFunction() { AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(Collections.emptyMap(), Collections.singletonMap("pattern1", Collections.emptyList()), Collections.emptyMap()); @@ -763,7 +790,9 @@ public class AutoFollowCoordinatorTests extends ESTestCase { autoFollower.start(); assertThat(allResults.size(), equalTo(states.length)); for (int i = 0; i < states.length; i++) { - assertThat(allResults.get(i).autoFollowExecutionResults.containsKey(new Index("logs-" + i, "_na_")), is(true)); + final String indexName = "logs-" + i; + assertThat(allResults.get(i).autoFollowExecutionResults.keySet().stream() + .anyMatch(index -> index.getName().equals(indexName)), is(true)); } } @@ -1049,6 +1078,87 @@ public class AutoFollowCoordinatorTests extends ESTestCase { } } + public void testClosedIndicesAreNotAutoFollowed() { + final Client client = mock(Client.class); + when(client.getRemoteClusterClient(anyString())).thenReturn(client); + + final String pattern = "pattern1"; + final ClusterState localState = ClusterState.builder(new ClusterName("local")) + .metaData(MetaData.builder() + .putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.singletonMap(pattern, + new AutoFollowPattern("remote", Collections.singletonList("docs-*"), null, null, null, null, null, null, null, null, + null, null, null)), + Collections.singletonMap(pattern, Collections.emptyList()), + Collections.singletonMap(pattern, Collections.emptyMap())))) + .build(); + + ClusterState remoteState = null; + final int nbLeaderIndices = randomIntBetween(1, 15); + for (int i = 0; i < nbLeaderIndices; i++) { + String indexName = "docs-" + i; + if (remoteState == null) { + remoteState = createRemoteClusterState(indexName, true); + } else { + remoteState = createRemoteClusterState(remoteState, indexName); + } + if (randomBoolean()) { + // randomly close the index + remoteState = ClusterState.builder(remoteState.getClusterName()) + .routingTable(remoteState.routingTable()) + .metaData(MetaData.builder(remoteState.metaData()) + .put(IndexMetaData.builder(remoteState.metaData().index(indexName)).state(IndexMetaData.State.CLOSE).build(), true) + .build()) + .build(); + } + } + + final 
ClusterState finalRemoteState = remoteState; + final AtomicReference lastModifiedClusterState = new AtomicReference<>(localState); + final List results = new ArrayList<>(); + final Set followedIndices = ConcurrentCollections.newConcurrentSet(); + final AutoFollower autoFollower = + new AutoFollower("remote", results::addAll, localClusterStateSupplier(localState), () -> 1L, Runnable::run) { + @Override + void getRemoteClusterState(String remoteCluster, + long metadataVersion, + BiConsumer handler) { + assertThat(remoteCluster, equalTo("remote")); + handler.accept(new ClusterStateResponse(new ClusterName("remote"), finalRemoteState, false), null); + } + + @Override + void createAndFollow(Map headers, + PutFollowAction.Request followRequest, + Runnable successHandler, + Consumer failureHandler) { + followedIndices.add(followRequest.getLeaderIndex()); + successHandler.run(); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + lastModifiedClusterState.updateAndGet(updateFunction::apply); + handler.accept(null); + } + + @Override + void cleanFollowedRemoteIndices(ClusterState remoteClusterState, List patterns) { + // Ignore, to avoid invoking updateAutoFollowMetadata(...) twice + } + }; + autoFollower.start(); + + assertThat(results, notNullValue()); + assertThat(results.size(), equalTo(1)); + + for (ObjectObjectCursor index : remoteState.metaData().indices()) { + boolean expect = index.value.getState() == IndexMetaData.State.OPEN; + assertThat(results.get(0).autoFollowExecutionResults.containsKey(index.value.getIndex()), is(expect)); + assertThat(followedIndices.contains(index.key), is(expect)); + } + } + private static ClusterState createRemoteClusterState(String indexName, Boolean enableSoftDeletes) { Settings.Builder indexSettings; if (enableSoftDeletes != null) { @@ -1075,11 +1185,13 @@ public class AutoFollowCoordinatorTests extends ESTestCase { private static ClusterState createRemoteClusterState(ClusterState previous, String indexName) { IndexMetaData indexMetaData = IndexMetaData.builder(indexName) - .settings(settings(Version.CURRENT).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) + .settings(settings(Version.CURRENT) + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID(random()))) .numberOfShards(1) .numberOfReplicas(0) .build(); - ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("remote")) + ClusterState.Builder csBuilder = ClusterState.builder(previous.getClusterName()) .metaData(MetaData.builder(previous.metaData()) .version(previous.metaData().version() + 1) .put(indexMetaData, true)); @@ -1087,7 +1199,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { ShardRouting shardRouting = TestShardRouting.newShardRouting(indexName, 0, "1", true, ShardRoutingState.INITIALIZING).moveToStarted(); IndexRoutingTable indexRoutingTable = IndexRoutingTable.builder(indexMetaData.getIndex()).addShard(shardRouting).build(); - csBuilder.routingTable(RoutingTable.builder().add(indexRoutingTable).build()).build(); + csBuilder.routingTable(RoutingTable.builder(previous.routingTable()).add(indexRoutingTable).build()).build(); return csBuilder.build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index bf28e768336..920d0e6e22f 100644 
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -50,7 +50,7 @@ public class ClusterPrivilegeResolver { private static final Set MANAGE_TOKEN_PATTERN = Collections.singleton("cluster:admin/xpack/security/token/*"); private static final Set MANAGE_API_KEY_PATTERN = Collections.singleton("cluster:admin/xpack/security/api_key/*"); private static final Set MONITOR_PATTERN = Collections.singleton("cluster:monitor/*"); - private static final Set MONITOR_DATA_FRAME_PATTERN = Collections.unmodifiableSet( + private static final Set MONITOR_TRANSFORM_PATTERN = Collections.unmodifiableSet( Sets.newHashSet("cluster:monitor/data_frame/*", "cluster:monitor/transform/*")); private static final Set MONITOR_ML_PATTERN = Collections.singleton("cluster:monitor/xpack/ml/*"); private static final Set MONITOR_WATCHER_PATTERN = Collections.singleton("cluster:monitor/xpack/watcher/*"); @@ -59,7 +59,7 @@ public class ClusterPrivilegeResolver { Sets.newHashSet("cluster:*", "indices:admin/template/*")); private static final Set MANAGE_ML_PATTERN = Collections.unmodifiableSet( Sets.newHashSet("cluster:admin/xpack/ml/*", "cluster:monitor/xpack/ml/*")); - private static final Set MANAGE_DATA_FRAME_PATTERN = Collections.unmodifiableSet( + private static final Set MANAGE_TRANSFORM_PATTERN = Collections.unmodifiableSet( Sets.newHashSet("cluster:admin/data_frame/*", "cluster:monitor/data_frame/*", "cluster:monitor/transform/*", "cluster:admin/transform/*")); private static final Set MANAGE_WATCHER_PATTERN = Collections.unmodifiableSet( @@ -90,14 +90,18 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN); public static final NamedClusterPrivilege MONITOR = new ActionClusterPrivilege("monitor", MONITOR_PATTERN); public static final NamedClusterPrivilege MONITOR_ML = new ActionClusterPrivilege("monitor_ml", MONITOR_ML_PATTERN); - public static final NamedClusterPrivilege MONITOR_DATA_FRAME = - new ActionClusterPrivilege("monitor_data_frame_transforms", MONITOR_DATA_FRAME_PATTERN); + public static final NamedClusterPrivilege MONITOR_TRANSFORM_DEPRECATED = + new ActionClusterPrivilege("monitor_data_frame_transforms", MONITOR_TRANSFORM_PATTERN); + public static final NamedClusterPrivilege MONITOR_TRANSFORM = + new ActionClusterPrivilege("monitor_transform", MONITOR_TRANSFORM_PATTERN); public static final NamedClusterPrivilege MONITOR_WATCHER = new ActionClusterPrivilege("monitor_watcher", MONITOR_WATCHER_PATTERN); public static final NamedClusterPrivilege MONITOR_ROLLUP = new ActionClusterPrivilege("monitor_rollup", MONITOR_ROLLUP_PATTERN); public static final NamedClusterPrivilege MANAGE = new ActionClusterPrivilege("manage", ALL_CLUSTER_PATTERN, ALL_SECURITY_PATTERN); public static final NamedClusterPrivilege MANAGE_ML = new ActionClusterPrivilege("manage_ml", MANAGE_ML_PATTERN); - public static final NamedClusterPrivilege MANAGE_DATA_FRAME = - new ActionClusterPrivilege("manage_data_frame_transforms", MANAGE_DATA_FRAME_PATTERN); + public static final NamedClusterPrivilege MANAGE_TRANSFORM_DEPRECATED = + new ActionClusterPrivilege("manage_data_frame_transforms", MANAGE_TRANSFORM_PATTERN); + public static final NamedClusterPrivilege MANAGE_TRANSFORM = + new ActionClusterPrivilege("manage_transform", MANAGE_TRANSFORM_PATTERN); public static 
final NamedClusterPrivilege MANAGE_TOKEN = new ActionClusterPrivilege("manage_token", MANAGE_TOKEN_PATTERN); public static final NamedClusterPrivilege MANAGE_WATCHER = new ActionClusterPrivilege("manage_watcher", MANAGE_WATCHER_PATTERN); public static final NamedClusterPrivilege MANAGE_ROLLUP = new ActionClusterPrivilege("manage_rollup", MANAGE_ROLLUP_PATTERN); @@ -133,12 +137,14 @@ public class ClusterPrivilegeResolver { ALL, MONITOR, MONITOR_ML, - MONITOR_DATA_FRAME, + MONITOR_TRANSFORM_DEPRECATED, + MONITOR_TRANSFORM, MONITOR_WATCHER, MONITOR_ROLLUP, MANAGE, MANAGE_ML, - MANAGE_DATA_FRAME, + MANAGE_TRANSFORM_DEPRECATED, + MANAGE_TRANSFORM, MANAGE_TOKEN, MANAGE_WATCHER, MANAGE_IDX_TEMPLATES, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index a5c715ea41e..d19c8f3ee05 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableCluster import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.UsernamesField; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.watch.Watch; @@ -179,28 +180,52 @@ public class ReservedRolesStore implements BiConsumer, ActionListene .application("kibana-*").resources("*").privileges("reserved_ml").build() }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + // DEPRECATED: to be removed in 9.0.0 .put("data_frame_transforms_admin", new RoleDescriptor("data_frame_transforms_admin", new String[] { "manage_data_frame_transforms" }, new RoleDescriptor.IndicesPrivileges[]{ RoleDescriptor.IndicesPrivileges.builder() - .indices(".data-frame-notifications*") + .indices(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, + TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED, + TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS) .privileges("view_index_metadata", "read").build() }, new RoleDescriptor.ApplicationResourcePrivileges[] { RoleDescriptor.ApplicationResourcePrivileges.builder() .application("kibana-*").resources("*").privileges("reserved_ml").build() }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + // DEPRECATED: to be removed in 9.0.0 .put("data_frame_transforms_user", new RoleDescriptor("data_frame_transforms_user", new String[] { "monitor_data_frame_transforms" }, new RoleDescriptor.IndicesPrivileges[]{ RoleDescriptor.IndicesPrivileges.builder() - .indices(".data-frame-notifications*") + .indices(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, + TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED, + TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS) .privileges("view_index_metadata", "read").build() }, new RoleDescriptor.ApplicationResourcePrivileges[] { RoleDescriptor.ApplicationResourcePrivileges.builder() .application("kibana-*").resources("*").privileges("reserved_ml").build() }, null, null, 
MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + .put("transform_admin", new RoleDescriptor("transform_admin", + new String[] { "manage_transform" }, + new RoleDescriptor.IndicesPrivileges[]{ + RoleDescriptor.IndicesPrivileges.builder() + .indices(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, + TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED, + TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS) + .privileges("view_index_metadata", "read").build() + }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + .put("transform_user", new RoleDescriptor("transform_user", + new String[] { "monitor_transform" }, + new RoleDescriptor.IndicesPrivileges[]{ + RoleDescriptor.IndicesPrivileges.builder() + .indices(TransformInternalIndexConstants.AUDIT_INDEX_PATTERN, + TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED, + TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS) + .privileges("view_index_metadata", "read").build() + }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) .put("watcher_admin", new RoleDescriptor("watcher_admin", new String[] { "manage_watcher" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java index dcf8707e87c..f0828c281ce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java @@ -30,7 +30,11 @@ public final class TransformInternalIndexConstants { // audit index public static final String AUDIT_TEMPLATE_VERSION = "1"; - public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-"; + public static final String AUDIT_INDEX_PREFIX = ".transform-notifications-"; + public static final String AUDIT_INDEX_PATTERN = AUDIT_INDEX_PREFIX + "*"; + public static final String AUDIT_INDEX_PATTERN_DEPRECATED = ".data-frame-notifications-*"; + + public static final String AUDIT_INDEX_READ_ALIAS = ".transform-notifications-read"; public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; private TransformInternalIndexConstants() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 9c23def4283..6174075e2b5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -186,6 +186,8 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(ReservedRolesStore.isReserved("machine_learning_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("data_frame_transforms_user"), is(true)); assertThat(ReservedRolesStore.isReserved("data_frame_transforms_admin"), is(true)); + assertThat(ReservedRolesStore.isReserved("transform_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("transform_admin"), is(true)); 
assertThat(ReservedRolesStore.isReserved("watcher_user"), is(true)); assertThat(ReservedRolesStore.isReserved("watcher_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_dashboard_only_user"), is(true)); @@ -1121,82 +1123,108 @@ public class ReservedRolesStoreTests extends ESTestCase { new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); } - public void testDataFrameTransformsAdminRole() { + public void testTransformAdminRole() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = mock(Authentication.class); - RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("data_frame_transforms_admin"); - assertNotNull(roleDescriptor); - assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + RoleDescriptor[] roleDescriptors = { + new ReservedRolesStore().roleDescriptor("data_frame_transforms_admin"), + new ReservedRolesStore().roleDescriptor("transform_admin") + }; - Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); + for (RoleDescriptor roleDescriptor : roleDescriptors) { + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); + Role role = Role.builder(roleDescriptor, null).build(); + assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); - assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); - assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); // internal use only + assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); + assertOnlyReadAllowed(role, 
TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED); + assertNoAccessAllowed(role, "foo"); + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); // internal use only - final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); - assertThat(role.application().grants( - new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); - assertThat(role.application().grants( - new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); - final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); - assertThat(role.application().grants( - new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); - assertThat(role.application().grants( - new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); + final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); + + if (roleDescriptor.getName().equals("data_frame_transforms_admin")) { + assertThat(role.application() + .grants(new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + } + + final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); + if (roleDescriptor.getName().equals("data_frame_transforms_admin")) { + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); + } + } } public void testDataFrameTransformsUserRole() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = mock(Authentication.class); - RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("data_frame_transforms_user"); - assertNotNull(roleDescriptor); - assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + RoleDescriptor[] roleDescriptors = { + new ReservedRolesStore().roleDescriptor("data_frame_transforms_user"), + new ReservedRolesStore().roleDescriptor("transform_user") + }; - Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); + for (RoleDescriptor roleDescriptor : roleDescriptors) { + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - 
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); + Role role = Role.builder(roleDescriptor, null).build(); + assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(StopTransformAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); - assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); - assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); + assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX_PATTERN); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX_PATTERN_DEPRECATED); + assertNoAccessAllowed(role, "foo"); + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); - final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); - assertThat(role.application().grants( - new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); - assertThat(role.application().grants( - new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); - final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); - assertThat(role.application().grants( - new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); - assertThat(role.application().grants( - new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); + final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), is(false)); + + if (roleDescriptor.getName().equals("data_frame_transforms_user")) { + assertThat(role.application().grants( + new ApplicationPrivilege(kibanaApplicationWithRandomIndex, "app-reserved_ml", "reserved_ml"), "*"), is(true)); + } + + final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-foo", "foo"), "*"), is(false)); + if (roleDescriptor.getName().equals("data_frame_transforms_user")) { + assertThat(role.application().grants( + new ApplicationPrivilege(otherApplication, "app-reserved_ml", "reserved_ml"), "*"), is(false)); + } + } } public void testWatcherAdminRole() { diff --git a/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml 
b/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml
index 81f7064f8ce..c91f98fb18b 100644
--- a/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml
+++ b/x-pack/plugin/ilm/qa/rest/src/test/resources/rest-api-spec/test/ilm/40_explain_lifecycle.yml
@@ -162,6 +162,9 @@ teardown:
 ---
 "Test All Indexes Lifecycle Explain":
+  - skip:
+      reason: https://github.com/elastic/elasticsearch/issues/47275
+      version: "6.7.0 - "
   - do:
       ilm.explain_lifecycle:
diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java
index 747c48ac9a9..237943aa100 100644
--- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java
+++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java
@@ -117,7 +117,7 @@ public class SLMSnapshotBlockingIntegTests extends ESIntegTestCase {
         return settings.build();
     }

-
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47689")
     public void testSnapshotInProgress() throws Exception {
         final String indexName = "test";
         final String policyName = "test-policy";
@@ -167,6 +167,7 @@ public class SLMSnapshotBlockingIntegTests extends ESIntegTestCase {
         }
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47834")
     public void testRetentionWhileSnapshotInProgress() throws Exception {
         final String indexName = "test";
         final String policyId = "slm-policy";
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java
index 2fe7e983a7a..0aae7194487 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java
@@ -49,11 +49,19 @@ public class ClusterAlertsUtil {
     private static final Pattern UNIQUE_WATCH_ID_PROPERTY =
             Pattern.compile(Pattern.quote("${monitoring.watch.unique_id}"));

+    /**
+     * Replace the ${monitoring.version_created} field in the watches.
+     *
+     * @see #LAST_UPDATED_VERSION
+     */
+    private static final Pattern VERSION_CREATED_PROPERTY =
+            Pattern.compile(Pattern.quote("${monitoring.version_created}"));
+
     /**
      * The last time that all watches were updated. For now, all watches have been updated in the same version and should all be replaced
      * together.
      */
-    public static final int LAST_UPDATED_VERSION = Version.V_7_0_0.id;
+    public static final int LAST_UPDATED_VERSION = Version.V_7_5_0.id;

     /**
      * An unsorted list of Watch IDs representing resource files for Monitoring Cluster Alerts.
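(Aside on the substitution mechanics in the next hunk, as a minimal sketch rather than code from this change: Pattern.quote is required because the placeholder contains the regex metacharacters '$', '{' and '}', so it must be matched literally.)

import java.util.regex.Pattern;

// Replace the literal placeholder with a version id; 7050099 is the id form
// of Version.V_7_5_0 (7 * 1_000_000 + 5 * 10_000 + 99), i.e. LAST_UPDATED_VERSION above.
Pattern versionCreated = Pattern.compile(Pattern.quote("${monitoring.version_created}"));
String watchSource = "{\"version_created\": \"${monitoring.version_created}\"}";
String resolved = versionCreated.matcher(watchSource).replaceAll("7050099");
// resolved -> {"version_created": "7050099"}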
@@ -113,6 +121,7 @@ public class ClusterAlertsUtil { source = CLUSTER_UUID_PROPERTY.matcher(source).replaceAll(clusterUuid); source = WATCH_ID_PROPERTY.matcher(source).replaceAll(watchId); source = UNIQUE_WATCH_ID_PROPERTY.matcher(source).replaceAll(uniqueWatchId); + source = VERSION_CREATED_PROPERTY.matcher(source).replaceAll(Integer.toString(LAST_UPDATED_VERSION)); return source; } catch (final IOException e) { diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json index 4e250d5d743..16e52bce019 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json @@ -7,7 +7,7 @@ "link": "elasticsearch/indices", "severity": 2100, "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", "watch": "${monitoring.watch.id}" } }, @@ -134,11 +134,23 @@ }, "transform": { "script": { - "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def state = ctx.payload.check.hits.hits[0]._source.cluster_state.status;if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.vars.state = state.toUpperCase();ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" + "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0 && ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack != null) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.default_admin_email : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;ctx.vars.found_state = ctx.payload.check.hits.total != 0;def state = ctx.vars.found_state ? 
ctx.payload.check.hits.hits[0]._source.cluster_state.status : 'unknown';if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.vars.state = state.toUpperCase();ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;" } }, "actions": { + "log_state_not_found": { + "condition": { + "script": "!ctx.vars.found_state" + }, + "logging" : { + "text" : "Watch [{{ctx.metadata.xpack.watch}}] could not determine cluster state for cluster [{{ctx.metadata.xpack.cluster_uuid}}]. This likely means the cluster has not sent any monitoring data recently.", + "level" : "debug" + } + }, "add_to_alerts_index": { + "condition": { + "script": "ctx.vars.found_state" + }, "index": { "index": ".monitoring-alerts-7", "doc_id": "${monitoring.watch.unique_id}" @@ -146,7 +158,7 @@ }, "send_email_to_admin": { "condition": { - "script": "return ctx.vars.email_recipient != null && (ctx.vars.is_new || ctx.vars.is_resolved)" + "script": "return ctx.vars.email_recipient != null && ctx.vars.found_state && (ctx.vars.is_new || ctx.vars.is_resolved)" }, "email": { "to": "X-Pack Admin <{{ctx.vars.email_recipient}}>", diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json index d79cb786267..4347801fa2a 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_nodes.json @@ -7,7 +7,7 @@ "link": "elasticsearch/nodes", "severity": 1999, "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", "watch": "${monitoring.watch.id}" } }, diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json index 37132a03c7b..05fa8396623 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json @@ -7,7 +7,7 @@ "link": "elasticsearch/nodes", "severity": 1000, "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", "watch": "${monitoring.watch.id}" } }, diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json index 3e08fd98843..b35137ad140 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/kibana_version_mismatch.json @@ -7,7 +7,7 @@ "link": "kibana/instances", "severity": 1000, "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", 
"watch": "${monitoring.watch.id}" } }, diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json index 8bb5b5efe9d..8417ef4d069 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/logstash_version_mismatch.json @@ -7,7 +7,7 @@ "link": "logstash/instances", "severity": 1000, "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", "watch": "${monitoring.watch.id}" } }, diff --git a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json index 3f1f49e0240..35041919141 100644 --- a/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json +++ b/x-pack/plugin/monitoring/src/main/resources/monitoring/watches/xpack_license_expiration.json @@ -8,7 +8,7 @@ "alert_index": ".monitoring-alerts-7", "cluster_uuid": "${monitoring.watch.cluster_uuid}", "type": "monitoring", - "version_created": 7000099, + "version_created": "${monitoring.version_created}", "watch": "${monitoring.watch.id}" } }, diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtilTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtilTests.java index 868cd17b3eb..7ce728e2582 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtilTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtilTests.java @@ -68,6 +68,7 @@ public class ClusterAlertsUtilTests extends ESTestCase { assertThat(watch, notNullValue()); assertThat(watch, containsString(clusterUuid)); assertThat(watch, containsString(watchId)); + assertThat(watch, containsString(String.valueOf(ClusterAlertsUtil.LAST_UPDATED_VERSION))); if ("elasticsearch_nodes".equals(watchId) == false) { assertThat(watch, containsString(clusterUuid + "_" + watchId)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 492d24b88f0..98f4d9e4283 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -452,7 +452,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34762") public void testRandomizedDateHisto() throws Exception { String rollupIndex = randomAlphaOfLengthBetween(5, 10); @@ -468,7 +467,9 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { final List> dataset = new ArrayList<>(); int numDocs = randomIntBetween(1,100); for (int i = 0; i < numDocs; i++) { - long timestamp = new DateTime().minusHours(randomIntBetween(1,100)).getMillis(); + // Make sure the timestamp is sufficiently in the past that we don't get bitten + // by internal rounding, causing no docs to match + long timestamp = new 
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index 492d24b88f0..98f4d9e4283 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -452,7 +452,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
         });
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34762")
     public void testRandomizedDateHisto() throws Exception {
         String rollupIndex = randomAlphaOfLengthBetween(5, 10);
 
@@ -468,7 +467,9 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
         final List<Map<String, Object>> dataset = new ArrayList<>();
         int numDocs = randomIntBetween(1,100);
         for (int i = 0; i < numDocs; i++) {
-            long timestamp = new DateTime().minusHours(randomIntBetween(1,100)).getMillis();
+            // Make sure the timestamp is sufficiently in the past that we don't get bitten
+            // by internal rounding, causing no docs to match
+            long timestamp = new DateTime().minusDays(2).minusHours(randomIntBetween(11,100)).getMillis();
             dataset.add(asMap(timestampField, timestamp, valueField, randomLongBetween(1, 100)));
         }
         executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> {
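The new bounds keep every generated document at least 2 days + 11 hours in the past, so any bucket boundary the indexer derives from the current time can no longer round all documents out of range. A java.time equivalent of the Joda expression above, as a sketch:

```java
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.ThreadLocalRandom;

public class SafeTimestampDemo {
    public static void main(String[] args) {
        // analogue of randomIntBetween(11, 100) in the test above
        int hours = ThreadLocalRandom.current().nextInt(11, 101);
        // Every document ends up well behind "now", so rounding the current time
        // down to a bucket boundary can never exclude all generated docs.
        long timestamp = Instant.now()
                .minus(Duration.ofDays(2))
                .minus(Duration.ofHours(hours))
                .toEpochMilli();
        System.out.println(timestamp);
    }
}
```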
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java
index 64f3be516fa..24cbb9418f8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java
@@ -83,13 +83,8 @@ class LdapUserSearchSessionFactory extends PoolingSessionFactory {
             final String dn = entry.getDN();
             final byte[] passwordBytes = CharArrays.toUtf8Bytes(password.getChars());
             final SimpleBindRequest bind = new SimpleBindRequest(dn, passwordBytes);
-            LdapUtils.maybeForkThenBindAndRevert(connectionPool, bind, threadPool, new ActionRunnable<LdapSession>(listener) {
-                @Override
-                protected void doRun() throws Exception {
-                    listener.onResponse(new LdapSession(logger, config, connectionPool, dn, groupResolver, metaDataResolver, timeout,
-                        entry.getAttributes()));
-                }
-            });
+            LdapUtils.maybeForkThenBindAndRevert(connectionPool, bind, threadPool, ActionRunnable.supply(listener, () ->
+                new LdapSession(logger, config, connectionPool, dn, groupResolver, metaDataResolver, timeout, entry.getAttributes())));
         }, listener::onFailure));
     }
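`ActionRunnable.supply` routes the value produced by a lambda (or any exception it throws) into the listener, replacing the anonymous `doRun()` subclass. A self-contained sketch of the shape of that helper, with simplified stand-in types instead of the real `org.elasticsearch.action` classes:

```java
import java.util.concurrent.Callable;

public class SupplyDemo {
    // Simplified stand-ins for ActionListener / ActionRunnable.
    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    // Shape of ActionRunnable.supply: run the supplier, route the result
    // (or any exception) to the listener.
    static <T> Runnable supply(Listener<T> listener, Callable<T> supplier) {
        return () -> {
            try {
                listener.onResponse(supplier.call());
            } catch (Exception e) {
                listener.onFailure(e);
            }
        };
    }
}
```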
diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
index 6ec60f09ba3..e62fda5478b 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
@@ -273,47 +273,46 @@ histogramDateTime
 schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h;
 
-         h          |       c
---------------------+---------------
-null                |10
-1951-04-11T00:00:00Z|1
-1952-04-05T00:00:00Z|10
-1953-03-31T00:00:00Z|10
-1954-03-26T00:00:00Z|7
-1955-03-21T00:00:00Z|4
-1956-03-15T00:00:00Z|4
-1957-03-10T00:00:00Z|6
-1958-03-05T00:00:00Z|6
-1959-02-28T00:00:00Z|9
-1960-02-23T00:00:00Z|7
-1961-02-17T00:00:00Z|8
-1962-02-12T00:00:00Z|6
-1963-02-07T00:00:00Z|7
-1964-02-02T00:00:00Z|5
-
+           h            |       c
+------------------------+---------------
+null                    |10
+1952-01-01T00:00:00.000Z|8
+1953-01-01T00:00:00.000Z|11
+1954-01-01T00:00:00.000Z|8
+1955-01-01T00:00:00.000Z|4
+1956-01-01T00:00:00.000Z|5
+1957-01-01T00:00:00.000Z|4
+1958-01-01T00:00:00.000Z|7
+1959-01-01T00:00:00.000Z|9
+1960-01-01T00:00:00.000Z|8
+1961-01-01T00:00:00.000Z|8
+1962-01-01T00:00:00.000Z|6
+1963-01-01T00:00:00.000Z|7
+1964-01-01T00:00:00.000Z|4
+1965-01-01T00:00:00.000Z|1
 ;
 
 histogramDateTimeWithCountAndOrder
 schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
 
-         h          |       c
---------------------+---------------
-1964-02-02T00:00:00Z|5
-1963-02-07T00:00:00Z|7
-1962-02-12T00:00:00Z|6
-1961-02-17T00:00:00Z|8
-1960-02-23T00:00:00Z|7
-1959-02-28T00:00:00Z|9
-1958-03-05T00:00:00Z|6
-1957-03-10T00:00:00Z|6
-1956-03-15T00:00:00Z|4
-1955-03-21T00:00:00Z|4
-1954-03-26T00:00:00Z|7
-1953-03-31T00:00:00Z|10
-1952-04-05T00:00:00Z|10
-1951-04-11T00:00:00Z|1
-null                |10
+           h            |       c
+------------------------+---------------
+1965-01-01T00:00:00.000Z|1
+1964-01-01T00:00:00.000Z|4
+1963-01-01T00:00:00.000Z|7
+1962-01-01T00:00:00.000Z|6
+1961-01-01T00:00:00.000Z|8
+1960-01-01T00:00:00.000Z|8
+1959-01-01T00:00:00.000Z|9
+1958-01-01T00:00:00.000Z|7
+1957-01-01T00:00:00.000Z|4
+1956-01-01T00:00:00.000Z|5
+1955-01-01T00:00:00.000Z|4
+1954-01-01T00:00:00.000Z|8
+1953-01-01T00:00:00.000Z|11
+1952-01-01T00:00:00.000Z|8
+null                    |10
 ;
 
 histogramDateTimeWithMonthOnTop
@@ -369,23 +368,23 @@ histogramGroupByWithoutAlias
 schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY HISTOGRAM(birth_date, INTERVAL 1 YEAR) ORDER BY h DESC;
 
-         h          |       c
---------------------+---------------
-1964-02-02T00:00:00Z|5
-1963-02-07T00:00:00Z|7
-1962-02-12T00:00:00Z|6
-1961-02-17T00:00:00Z|8
-1960-02-23T00:00:00Z|7
-1959-02-28T00:00:00Z|9
-1958-03-05T00:00:00Z|6
-1957-03-10T00:00:00Z|6
-1956-03-15T00:00:00Z|4
-1955-03-21T00:00:00Z|4
-1954-03-26T00:00:00Z|7
-1953-03-31T00:00:00Z|10
-1952-04-05T00:00:00Z|10
-1951-04-11T00:00:00Z|1
-null                |10
+           h            |       c
+------------------------+---------------
+1965-01-01T00:00:00.000Z|1
+1964-01-01T00:00:00.000Z|4
+1963-01-01T00:00:00.000Z|7
+1962-01-01T00:00:00.000Z|6
+1961-01-01T00:00:00.000Z|8
+1960-01-01T00:00:00.000Z|8
+1959-01-01T00:00:00.000Z|9
+1958-01-01T00:00:00.000Z|7
+1957-01-01T00:00:00.000Z|4
+1956-01-01T00:00:00.000Z|5
+1955-01-01T00:00:00.000Z|4
+1954-01-01T00:00:00.000Z|8
+1953-01-01T00:00:00.000Z|11
+1952-01-01T00:00:00.000Z|8
+null                    |10
 ;
 
 countAll
diff --git a/x-pack/plugin/sql/qa/src/main/resources/conditionals.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/conditionals.csv-spec
index c54c4a81726..aaa29e814d4 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/conditionals.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/conditionals.csv-spec
@@ -348,3 +348,104 @@ SELECT CONVERT(IIF(languages > 1, IIF(languages = 3, '3')), SQL_BIGINT) AS cond
 3
 null
 ;
+
+ifNullWithCompatibleDateBasedValues
+schema::replacement:ts
+SELECT IFNULL(birth_date, {d '2110-04-12'}) AS replacement FROM test_emp GROUP BY 1 ORDER BY replacement DESC LIMIT 5;
+
+       replacement
+------------------------
+2110-04-12T00:00:00.000Z
+1965-01-03T00:00:00.000Z
+1964-10-18T00:00:00.000Z
+1964-06-11T00:00:00.000Z
+1964-06-02T00:00:00.000Z
+;
+
+caseWithCompatibleIntervals_1
+schema::date_math:ts|c:l
+SELECT birth_date + (CASE WHEN gender='M' THEN INTERVAL 1 YEAR ELSE INTERVAL 6 MONTH END) AS date_math, COUNT(*) c FROM test_emp GROUP BY 1 ORDER BY 1 DESC LIMIT 5;
+
+       date_math        |       c
+------------------------+---------------
+1966-01-03T00:00:00.000Z|1
+1965-06-11T00:00:00.000Z|1
+1965-04-18T00:00:00.000Z|2
+1964-12-02T00:00:00.000Z|1
+1964-11-26T00:00:00.000Z|1
+;
+
+caseWithCompatibleIntervals_2
+SELECT hire_date, birth_date, (CASE WHEN birth_date > {d '1960-01-01'} THEN INTERVAL 1 YEAR ELSE INTERVAL 1 MONTH END) AS x FROM test_emp WHERE x + hire_date > {d '1995-01-01'} ORDER BY hire_date;
+
+       hire_date        |       birth_date       |       x
+------------------------+------------------------+---------------
+1994-04-09T00:00:00.000Z|1962-11-07T00:00:00.000Z|+1-0
+1995-01-27T00:00:00.000Z|1961-05-02T00:00:00.000Z|+1-0
+1995-03-13T00:00:00.000Z|1957-04-04T00:00:00.000Z|+0-1
+1995-03-20T00:00:00.000Z|1953-04-03T00:00:00.000Z|+0-1
+1995-08-22T00:00:00.000Z|1952-07-08T00:00:00.000Z|+0-1
+1995-12-15T00:00:00.000Z|1960-05-25T00:00:00.000Z|+1-0
+1996-11-05T00:00:00.000Z|1964-06-11T00:00:00.000Z|+1-0
+1997-05-19T00:00:00.000Z|1958-09-05T00:00:00.000Z|+0-1
+1999-04-30T00:00:00.000Z|1953-01-23T00:00:00.000Z|+0-1
+;
+
+iifWithCompatibleIntervals
+schema::hire_date + IIF(salary > 70000, INTERVAL 2 HOURS, INTERVAL 2 DAYS):ts|salary:i
+SELECT hire_date + IIF(salary > 70000, INTERVAL 2 HOURS, INTERVAL 2 DAYS), salary FROM test_emp ORDER BY salary DESC LIMIT 10;
+
+hire_date + IIF(salary > 70000, INTERVAL 2 HOURS, INTERVAL 2 DAYS)|    salary
+------------------------------------------------------------------+---------------
+1985-11-20T02:00:00.000Z                                          |74999
+1989-09-02T02:00:00.000Z                                          |74970
+1989-02-10T02:00:00.000Z                                          |74572
+1989-07-07T02:00:00.000Z                                          |73851
+1999-04-30T02:00:00.000Z                                          |73717
+1988-10-18T02:00:00.000Z                                          |73578
+1990-09-15T02:00:00.000Z                                          |71165
+1987-03-18T02:00:00.000Z                                          |70011
+1987-05-28T00:00:00.000Z                                          |69904
+1990-02-18T00:00:00.000Z                                          |68547
+;
+
+isNullWithIntervalMath
+SELECT ISNULL(birth_date, INTERVAL '23:45' HOUR TO MINUTES + {d '2019-09-17'}) AS c, salary, birth_date, hire_date FROM test_emp ORDER BY salary DESC LIMIT 5;
+
+          c:ts          |    salary:i     |     birth_date:ts      |     hire_date:ts
+------------------------+-----------------+------------------------+------------------------
+1956-12-13T00:00:00.000Z|74999            |1956-12-13T00:00:00.000Z|1985-11-20T00:00:00.000Z
+2019-09-17T00:00:00.000Z|74970            |null                    |1989-09-02T00:00:00.000Z
+1957-05-23T00:00:00.000Z|74572            |1957-05-23T00:00:00.000Z|1989-02-10T00:00:00.000Z
+1962-07-10T00:00:00.000Z|73851            |1962-07-10T00:00:00.000Z|1989-07-07T00:00:00.000Z
+1953-01-23T00:00:00.000Z|73717            |1953-01-23T00:00:00.000Z|1999-04-30T00:00:00.000Z
+;
+
+coalesceWithCompatibleDateBasedTypes
+SELECT COALESCE(birth_date, CAST(birth_date AS DATE), CAST(hire_date AS DATETIME)) AS coalesce FROM test_emp ORDER BY 1 LIMIT 5;
+
+       coalesce:ts
+------------------------
+1952-02-27T00:00:00.000Z
+1952-04-19T00:00:00.000Z
+1952-05-15T00:00:00.000Z
+1952-06-13T00:00:00.000Z
+1952-07-08T00:00:00.000Z
+;
+
+greatestWithCompatibleDateBasedTypes
+SELECT GREATEST(null, null, birth_date + INTERVAL 25 YEARS, hire_date + INTERVAL 2 DAYS, CAST(hire_date + INTERVAL 2 DAYS AS DATE)) AS greatest, birth_date, hire_date FROM test_emp ORDER BY 1 LIMIT 10;
+
+      greatest:ts       |     birth_date:ts      |     hire_date:ts
+------------------------+------------------------+------------------------
+1985-02-20T00:00:00.000Z|1952-04-19T00:00:00.000Z|1985-02-18T00:00:00.000Z
+1985-02-26T00:00:00.000Z|null                    |1985-02-24T00:00:00.000Z
+1985-07-11T00:00:00.000Z|1952-06-13T00:00:00.000Z|1985-07-09T00:00:00.000Z
+1985-10-16T00:00:00.000Z|1955-08-20T00:00:00.000Z|1985-10-14T00:00:00.000Z
+1985-11-21T00:00:00.000Z|1957-12-03T00:00:00.000Z|1985-11-19T00:00:00.000Z
+1985-11-22T00:00:00.000Z|1956-12-13T00:00:00.000Z|1985-11-20T00:00:00.000Z
+1985-11-22T00:00:00.000Z|1959-04-07T00:00:00.000Z|1985-11-20T00:00:00.000Z
+1986-02-06T00:00:00.000Z|1954-09-13T00:00:00.000Z|1986-02-04T00:00:00.000Z
+1986-02-28T00:00:00.000Z|1952-11-13T00:00:00.000Z|1986-02-26T00:00:00.000Z
+1986-05-30T00:00:00.000Z|1961-05-30T00:00:00.000Z|1986-03-14T00:00:00.000Z
+;
diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec
index 1679f069b40..b2536271b22 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec
@@ -811,23 +811,23 @@ schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) AS c FROM emp GROUP BY h;
 
-         h          |       c
---------------------+---------------
-null                |10
-1951-04-11T00:00:00Z|1
-1952-04-05T00:00:00Z|10
-1953-03-31T00:00:00Z|10
-1954-03-26T00:00:00Z|7
-1955-03-21T00:00:00Z|4
-1956-03-15T00:00:00Z|4
-1957-03-10T00:00:00Z|6
-1958-03-05T00:00:00Z|6
-1959-02-28T00:00:00Z|9
-1960-02-23T00:00:00Z|7
-1961-02-17T00:00:00Z|8
-1962-02-12T00:00:00Z|6
-1963-02-07T00:00:00Z|7
-1964-02-02T00:00:00Z|5
+           h            |       c
+------------------------+---------------
+null                    |10
+1952-01-01T00:00:00.000Z|8
+1953-01-01T00:00:00.000Z|11
+1954-01-01T00:00:00.000Z|8
+1955-01-01T00:00:00.000Z|4
+1956-01-01T00:00:00.000Z|5
+1957-01-01T00:00:00.000Z|4
+1958-01-01T00:00:00.000Z|7
+1959-01-01T00:00:00.000Z|9
+1960-01-01T00:00:00.000Z|8
+1961-01-01T00:00:00.000Z|8
+1962-01-01T00:00:00.000Z|6
+1963-01-01T00:00:00.000Z|7
+1964-01-01T00:00:00.000Z|4
+1965-01-01T00:00:00.000Z|1
 // end::histogramDateTime
 ;
diff --git a/x-pack/plugin/sql/qa/src/main/resources/filter.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/filter.csv-spec
index b10c9f7e198..a5d2b185029 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/filter.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/filter.csv-spec
@@ -119,3 +119,23 @@ SELECT COUNT(*), TRUNCATE(emp_no, -2) t FROM test_emp WHERE 'aaabbb' RLIKE 'a{2,
 99             |10000
 1              |10100
 ;
+
+inWithCompatibleDateTypes
+SELECT birth_date FROM test_emp WHERE birth_date IN ({d '1959-07-23'},CAST('1959-12-25T12:12:12' AS TIMESTAMP)) OR birth_date IS NULL ORDER BY birth_date;
+
+     birth_date:ts
+------------------------
+1959-07-23T00:00:00.000Z
+1959-07-23T00:00:00.000Z
+1959-12-25T00:00:00.000Z
+null
+null
+null
+null
+null
+null
+null
+null
+null
+null
+;
diff --git a/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec
index fb95f6713c1..970724dabd2 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/math.csv-spec
@@ -101,13 +101,13 @@ SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages
 
      mi:i      |     ma:i      |    year:i     |ROUND(AVG(languages), 1):d|TRUNCATE(AVG(languages), 1):d|  COUNT(*):l
---------------+---------------+---------------+--------------------------+-----------------------------+---------------
-25324          |70011          |1986           |3.0                       |3.0                          |15
-25945          |73578          |1987           |2.9                       |2.8                          |9
-25976          |74970          |1988           |3.0                       |3.0                          |13
-31120          |71165          |1989           |3.1                       |3.0                          |12
-30404          |58715          |1992           |3.0                       |3.0                          |3
-35742          |67492          |1993           |2.8                       |2.7                          |4
-45656          |45656          |1995           |3.0                       |3.0                          |1
+25324          |70011          |1987           |3.0                       |3.0                          |15
+25945          |73578          |1988           |2.9                       |2.8                          |9
+25976          |74970          |1989           |3.0                       |3.0                          |13
+31120          |71165          |1990           |3.1                       |3.0                          |12
+30404          |58715          |1993           |3.0                       |3.0                          |3
+35742          |67492          |1994           |2.8                       |2.7                          |4
+45656          |45656          |1996           |3.0                       |3.0                          |1
 ;
 
 minMaxRoundWithHavingRound
@@ -115,17 +115,17 @@ SELECT MIN(salary) mi, MAX(salary) ma, YEAR(hire_date) year, ROUND(AVG(languages
 
      mi:i      |     ma:i      |    year:i     |ROUND(AVG(languages),1):d|  COUNT(*):l
---------------+---------------+---------------+-------------------------+---------------
-26436          |74999          |1984           |3.1                      |11
-31897          |61805          |1985           |3.5                      |11
-25324          |70011          |1986           |3.0                      |15
-25945          |73578          |1987           |2.9                      |9
-25976          |74970          |1988           |3.0                      |13
-31120          |71165          |1989           |3.1                      |12
-32568          |65030          |1990           |3.3                      |6
-27215          |60781          |1991           |4.1                      |8
-30404          |58715          |1992           |3.0                      |3
-35742          |67492          |1993           |2.8                      |4
-45656          |45656          |1995           |3.0                      |1
+26436          |74999          |1985           |3.1                      |11
+31897          |61805          |1986           |3.5                      |11
+25324          |70011          |1987           |3.0                      |15
+25945          |73578          |1988           |2.9                      |9
+25976          |74970          |1989           |3.0                      |13
+31120          |71165          |1990           |3.1                      |12
+32568          |65030          |1991           |3.3                      |6
+27215          |60781          |1992           |4.1                      |8
+30404          |58715          |1993           |3.0                      |3
+35742          |67492          |1994           |2.8                      |4
+45656          |45656          |1996           |3.0                      |1
 ;
 
 groupByAndOrderByTruncateWithPositiveParameter
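All of the expected-result churn above traces back to one behavioral change: `INTERVAL 1 YEAR` histograms now translate to a calendar interval, so buckets align to January 1 (1952-01-01, 1953-01-01, ...) instead of drifting fixed 365-day windows (1951-04-11, 1952-04-05, ...), and `YEAR(hire_date)` bucket labels shift accordingly. A sketch of the two composite-source flavors, using the same builder API the translator targets (field and source names are illustrative):

```java
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

import java.time.ZoneId;

public class HistogramIntervalDemo {
    // Calendar interval: buckets snap to calendar years (1952-01-01, 1953-01-01, ...).
    static DateHistogramValuesSourceBuilder calendarYear() {
        return new DateHistogramValuesSourceBuilder("h")
            .field("birth_date")
            .calendarInterval(DateHistogramInterval.YEAR)
            .timeZone(ZoneId.of("Z"));
    }

    // Fixed interval: buckets are exactly 365 days wide, so their starts drift across
    // leap years (1951-04-11, 1952-04-05, ...), which the old expected tables showed.
    static DateHistogramValuesSourceBuilder fixed365Days() {
        return new DateHistogramValuesSourceBuilder("h")
            .field("birth_date")
            .fixedInterval(new DateHistogramInterval(365L * 24 * 60 * 60 * 1000 + "ms"))
            .timeZone(ZoneId.of("Z"));
    }
}
```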
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
index 9cb752de5e6..38ded7cb091 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
@@ -12,7 +12,6 @@ import org.elasticsearch.xpack.sql.expression.Literal;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import java.time.ZoneId;
 import java.util.Collections;
@@ -48,7 +47,7 @@ public class Histogram extends GroupingFunction {
         if (resolution == TypeResolution.TYPE_RESOLVED) {
             // interval must be Literal interval
             if (field().dataType().isDateBased()) {
-                resolution = isType(interval, DataTypes::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval");
+                resolution = isType(interval, DataType::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval");
             } else {
                 resolution = isNumeric(interval, "(Numeric) HISTOGRAM", ParamOrdinal.SECOND);
             }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeFunction.java
index 0dbef38c91f..02ebfc648f4 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeFunction.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeFunction.java
@@ -20,19 +20,15 @@ import java.util.Objects;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate;
 import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString;
-import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation;
 import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder;
 
 public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
 
     private final ZoneId zoneId;
-    private final BinaryDateOperation operation;
 
-    public BinaryDateTimeFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId,
-                                  BinaryDateOperation operation) {
+    public BinaryDateTimeFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId) {
         super(source, datePart, timestamp);
         this.zoneId = zoneId;
-        this.operation = operation;
     }
 
     @Override
@@ -47,7 +43,7 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
         if (datePartValue != null && resolveDateTimeField(datePartValue) == false) {
             List<String> similar = findSimilarDateTimeFields(datePartValue);
             if (similar.isEmpty()) {
-                return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]",
+                return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases; found value [{}]",
                     sourceText(),
                     validDateTimeFieldValues(),
                     Expressions.name(left())));
@@ -78,9 +74,11 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
 
     @Override
     protected Pipe makePipe() {
-        return new BinaryDateTimePipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), zoneId, operation);
+        return createPipe(Expressions.pipe(left()), Expressions.pipe(right()), zoneId);
     }
 
+    protected abstract Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId);
+
     @Override
     public Nullability nullable() {
         return Nullability.TRUE;
@@ -101,7 +99,7 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
 
     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), zoneId, operation);
+        return Objects.hash(super.hashCode(), zoneId);
     }
 
     @Override
@@ -116,6 +114,6 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
             return false;
         }
         BinaryDateTimeFunction that = (BinaryDateTimeFunction) o;
-        return zoneId.equals(that.zoneId) && operation == that.operation;
+        return zoneId.equals(that.zoneId);
     }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipe.java
index e08e5f7ebe1..86209e59365 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipe.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipe.java
@@ -9,50 +9,34 @@ import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
-import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
 
 import java.time.ZoneId;
 import java.util.Objects;
 
-public class BinaryDateTimePipe extends BinaryPipe {
+public abstract class BinaryDateTimePipe extends BinaryPipe {
 
     private final ZoneId zoneId;
-    private final BinaryDateTimeProcessor.BinaryDateOperation operation;
 
-    public BinaryDateTimePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId,
-                              BinaryDateTimeProcessor.BinaryDateOperation operation) {
+    public BinaryDateTimePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
         super(source, expression, left, right);
         this.zoneId = zoneId;
-        this.operation = operation;
     }
 
     ZoneId zoneId() {
         return zoneId;
     }
 
-    BinaryDateTimeProcessor.BinaryDateOperation operation() {
-        return operation;
-    }
-
-    @Override
-    protected NodeInfo<BinaryDateTimePipe> info() {
-        return NodeInfo.create(this, BinaryDateTimePipe::new, expression(), left(), right(), zoneId, operation);
-    }
-
-    @Override
-    protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
-        return new BinaryDateTimePipe(source(), expression(), left, right, zoneId, operation);
-    }
-
     @Override
     public Processor asProcessor() {
-        return BinaryDateTimeProcessor.asProcessor(operation, left().asProcessor(), right().asProcessor(), zoneId);
+        return makeProcessor(left().asProcessor(), right().asProcessor(), zoneId);
     }
 
+    protected abstract Processor makeProcessor(Processor left, Processor right, ZoneId zoneId);
+
     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), zoneId, operation);
+        return Objects.hash(super.hashCode(), zoneId);
     }
 
     @Override
@@ -67,7 +51,6 @@ public class BinaryDateTimePipe extends BinaryPipe {
             return false;
         }
         BinaryDateTimePipe that = (BinaryDateTimePipe) o;
-        return Objects.equals(zoneId, that.zoneId) &&
-            operation == that.operation;
+        return Objects.equals(zoneId, that.zoneId);
     }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeProcessor.java
index 004bb5a73e5..2cd7b5e2de1 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeProcessor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimeProcessor.java
@@ -15,16 +15,8 @@ import java.io.IOException;
 import java.time.ZoneId;
 import java.util.Objects;
 
-import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation.TRUNC;
-
 public abstract class BinaryDateTimeProcessor extends BinaryProcessor {
 
-    // TODO: Remove and in favour of inheritance (subclasses which implement abstract methods)
-    public enum BinaryDateOperation {
-        TRUNC,
-        PART;
-    }
-
     private final ZoneId zoneId;
 
     public BinaryDateTimeProcessor(Processor source1, Processor source2, ZoneId zoneId) {
@@ -48,28 +40,24 @@ public abstract class BinaryDateTimeProcessor extends BinaryProcessor {
     @Override
     protected abstract Object doProcess(Object left, Object right);
 
-    public static BinaryDateTimeProcessor asProcessor(BinaryDateOperation operation, Processor left, Processor right, ZoneId zoneId) {
-        if (operation == TRUNC) {
-            return new DateTruncProcessor(left, right, zoneId);
-        } else {
-            return new DatePartProcessor(left, right, zoneId);
-        }
-    }
-
     @Override
     public int hashCode() {
-        return Objects.hash(zoneId);
+        return Objects.hash(left(), right(), zoneId);
     }
 
     @Override
-    public boolean equals(Object o) {
-        if (this == o) {
+    public boolean equals(Object obj) {
+        if (this == obj) {
             return true;
         }
-        if (o == null || getClass() != o.getClass()) {
+
+        if (obj == null || getClass() != obj.getClass()) {
             return false;
         }
-        BinaryDateTimeProcessor that = (BinaryDateTimeProcessor) o;
-        return zoneId.equals(that.zoneId);
+
+        BinaryDateTimeProcessor other = (BinaryDateTimeProcessor) obj;
+        return Objects.equals(left(), other.left())
+            && Objects.equals(right(), other.right())
+            && Objects.equals(zoneId(), other.zoneId());
     }
 }
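The equals/hashCode rewrite above matters because the processor previously compared only `zoneId`, so two processors over different operand trees were considered equal and hashed together. A toy illustration of the corrected identity, with plain strings standing in for the operand processors:

```java
import java.util.Objects;

public class ProcessorIdentityDemo {
    // Toy processor: two operand "trees" plus a zone, mirroring BinaryDateTimeProcessor.
    static final class Proc {
        final String left, right, zoneId;
        Proc(String left, String right, String zoneId) {
            this.left = left;
            this.right = right;
            this.zoneId = zoneId;
        }
        @Override
        public boolean equals(Object o) {
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Proc p = (Proc) o;
            // the old behaviour compared zoneId only, which equated the two instances below
            return Objects.equals(left, p.left) && Objects.equals(right, p.right) && Objects.equals(zoneId, p.zoneId);
        }
        @Override
        public int hashCode() {
            return Objects.hash(left, right, zoneId);
        }
    }

    public static void main(String[] args) {
        // different operands, same zone: must not be equal
        System.out.println(new Proc("month", "ts1", "Z").equals(new Proc("year", "ts2", "Z"))); // false
    }
}
```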
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePart.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePart.java
index 0fc616e14f7..b6a7c28ac1a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePart.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePart.java
@@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Nullability;
 import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -78,7 +79,7 @@ public class DatePart extends BinaryDateTimeFunction {
     }
 
     public DatePart(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
-        super(source, truncateTo, timestamp, zoneId, BinaryDateTimeProcessor.BinaryDateOperation.PART);
+        super(source, truncateTo, timestamp, zoneId);
     }
 
     @Override
@@ -101,16 +102,6 @@ public class DatePart extends BinaryDateTimeFunction {
         return Nullability.TRUE;
     }
 
-    @Override
-    protected boolean resolveDateTimeField(String dateTimeField) {
-        return Part.resolve(dateTimeField) != null;
-    }
-
-    @Override
-    protected List<String> findSimilarDateTimeFields(String dateTimeField) {
-        return Part.findSimilar(dateTimeField);
-    }
-
     @Override
     protected String scriptMethodName() {
         return "datePart";
@@ -121,6 +112,21 @@ public class DatePart extends BinaryDateTimeFunction {
         return DatePartProcessor.process(left().fold(), right().fold(), zoneId());
     }
 
+    @Override
+    protected Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId) {
+        return new DatePartPipe(source(), this, left, right, zoneId);
+    }
+
+    @Override
+    protected boolean resolveDateTimeField(String dateTimeField) {
+        return Part.resolve(dateTimeField) != null;
+    }
+
+    @Override
+    protected List<String> findSimilarDateTimeFields(String dateTimeField) {
+        return Part.findSimilar(dateTimeField);
+    }
+
     @Override
     protected List<String> validDateTimeFieldValues() {
         return Part.VALID_VALUES;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipe.java
new file mode 100644
index 00000000000..9eca90f845b
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipe.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
+
+import java.time.ZoneId;
+
+public class DatePartPipe extends BinaryDateTimePipe {
+
+    public DatePartPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
+        super(source, expression, left, right, zoneId);
+    }
+
+    @Override
+    protected NodeInfo<DatePartPipe> info() {
+        return NodeInfo.create(this, DatePartPipe::new, expression(), left(), right(), zoneId());
+    }
+
+    @Override
+    protected DatePartPipe replaceChildren(Pipe left, Pipe right) {
+        return new DatePartPipe(source(), expression(), left, right, zoneId());
+    }
+
+    @Override
+    protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
+        return new DatePartProcessor(left, right, zoneId);
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java
index 5941683f059..616e953588f 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java
@@ -34,36 +34,36 @@ public class DatePartProcessor extends BinaryDateTimeProcessor {
     }
 
     @Override
-    protected Object doProcess(Object left, Object right) {
-        return process(left, right, zoneId());
+    protected Object doProcess(Object part, Object timestamp) {
+        return process(part, timestamp, zoneId());
     }
 
     /**
      * Used in Painless scripting
      */
-    public static Object process(Object source1, Object source2, ZoneId zoneId) {
-        if (source1 == null || source2 == null) {
+    public static Object process(Object part, Object timestamp, ZoneId zoneId) {
+        if (part == null || timestamp == null) {
             return null;
         }
-        if (source1 instanceof String == false) {
-            throw new SqlIllegalArgumentException("A string is required; received [{}]", source1);
+        if (part instanceof String == false) {
+            throw new SqlIllegalArgumentException("A string is required; received [{}]", part);
         }
 
-        Part datePartField = Part.resolve((String) source1);
+        Part datePartField = Part.resolve((String) part);
         if (datePartField == null) {
-            List<String> similar = Part.findSimilar((String) source1);
+            List<String> similar = Part.findSimilar((String) part);
             if (similar.isEmpty()) {
                 throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
-                    Part.values(), source1);
+                    Part.values(), part);
             } else {
                 throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for extraction; " +
-                    "did you mean {}?", source1, similar);
+                    "did you mean {}?", part, similar);
             }
         }
 
-        if (source2 instanceof ZonedDateTime == false) {
-            throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", source2);
+        if (timestamp instanceof ZonedDateTime == false) {
+            throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
         }
 
-        return datePartField.extract(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
+        return datePartField.extract(((ZonedDateTime) timestamp).withZoneSameInstant(zoneId));
     }
 }
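`DatePartProcessor.process` is the entry point Painless calls, and with the parameter rename its contract reads directly off the signature. A usage sketch, assuming the SQL plugin classes above are on the classpath; the sample timestamp is arbitrary:

```java
import java.time.ZoneId;
import java.time.ZonedDateTime;

import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DatePartProcessor;

public class DatePartDemo {
    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.parse("1989-09-02T10:30:00Z");
        // "month" resolves to Part.MONTH; "months", "mm" and "m" are aliases
        System.out.println(DatePartProcessor.process("month", ts, ZoneId.of("Z"))); // 9
        // a null part or timestamp short-circuits to null
        System.out.println(DatePartProcessor.process(null, ts, ZoneId.of("Z")));   // null
    }
}
```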
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java
index d5e867e0b71..0cf95169d38 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java
@@ -22,7 +22,13 @@ public abstract class DateTimeHistogramFunction extends DateTimeFunction {
     }
 
     /**
-     * used for aggregration (date histogram)
+     * used for aggregation (date histogram)
     */
-    public abstract long interval();
+    public long fixedInterval() {
+        return -1;
+    }
+
+    public String calendarInterval() {
+        return null;
+    }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
index 6b13419def3..2cc6e527428 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTrunc.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Nullability;
 import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -21,9 +22,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.function.Function;
-
-import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation.TRUNC;
+import java.util.function.UnaryOperator;
 
 public class DateTrunc extends BinaryDateTimeFunction {
 
@@ -37,7 +36,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
                 .with(ChronoField.MONTH_OF_YEAR, 1)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
-            },"millennia"),
+        },"millennia"),
         CENTURY(dt -> {
             int year = dt.getYear();
             int firstYearOfCentury = year - (year % 100);
@@ -46,7 +45,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
                 .with(ChronoField.MONTH_OF_YEAR, 1)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
-            }, "centuries"),
+        }, "centuries"),
         DECADE(dt -> {
             int year = dt.getYear();
             int firstYearOfDecade = year - (year % 10);
@@ -55,7 +54,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
                 .with(ChronoField.MONTH_OF_YEAR, 1)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
-            }, "decades"),
+        }, "decades"),
         YEAR(dt -> dt
             .with(ChronoField.MONTH_OF_YEAR, 1)
             .with(ChronoField.DAY_OF_MONTH, 1)
@@ -68,14 +67,14 @@ public class DateTrunc extends BinaryDateTimeFunction {
                 .with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
-            }, "quarters", "qq", "q"),
+        }, "quarters", "qq", "q"),
         MONTH(dt -> dt
-                .with(ChronoField.DAY_OF_MONTH, 1)
-                .toLocalDate().atStartOfDay(dt.getZone()),
+            .with(ChronoField.DAY_OF_MONTH, 1)
+            .toLocalDate().atStartOfDay(dt.getZone()),
             "months", "mm", "m"),
         WEEK(dt -> dt
-                .with(ChronoField.DAY_OF_WEEK, 1)
-                .toLocalDate().atStartOfDay(dt.getZone()),
+            .with(ChronoField.DAY_OF_WEEK, 1)
+            .toLocalDate().atStartOfDay(dt.getZone()),
             "weeks", "wk", "ww"),
         DAY(dt -> dt.toLocalDate().atStartOfDay(dt.getZone()), "days", "dd", "d"),
         HOUR(dt -> {
@@ -89,16 +88,16 @@ public class DateTrunc extends BinaryDateTimeFunction {
             return dt.toLocalDate().atStartOfDay(dt.getZone())
                 .with(ChronoField.HOUR_OF_DAY, hour)
                 .with(ChronoField.MINUTE_OF_HOUR, minute);
-            }, "minutes", "mi", "n"),
+        }, "minutes", "mi", "n"),
         SECOND(dt -> dt.with(ChronoField.NANO_OF_SECOND, 0), "seconds", "ss", "s"),
         MILLISECOND(dt -> {
             int micros = dt.get(ChronoField.MICRO_OF_SECOND);
             return dt.with(ChronoField.MILLI_OF_SECOND, (micros / 1000));
-            }, "milliseconds", "ms"),
+        }, "milliseconds", "ms"),
         MICROSECOND(dt -> {
             int nanos = dt.getNano();
             return dt.with(ChronoField.MICRO_OF_SECOND, (nanos / 1000));
-            }, "microseconds", "mcs"),
+        }, "microseconds", "mcs"),
         NANOSECOND(dt -> dt, "nanoseconds", "ns");
 
         private static final Map<String, Part> NAME_TO_PART;
@@ -109,10 +108,10 @@ public class DateTrunc extends BinaryDateTimeFunction {
             VALID_VALUES = DateTimeField.initializeValidValues(values());
         }
 
-        private Function<ZonedDateTime, ZonedDateTime> truncateFunction;
+        private UnaryOperator<ZonedDateTime> truncateFunction;
         private Set<String> aliases;
 
-        Part(Function<ZonedDateTime, ZonedDateTime> truncateFunction, String... aliases) {
+        Part(UnaryOperator<ZonedDateTime> truncateFunction, String... aliases) {
            this.truncateFunction = truncateFunction;
            this.aliases = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(aliases)));
         }
@@ -136,7 +135,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
     }
 
     public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
-        super(source, truncateTo, timestamp, zoneId, TRUNC);
+        super(source, truncateTo, timestamp, zoneId);
     }
 
     @Override
@@ -159,16 +158,6 @@ public class DateTrunc extends BinaryDateTimeFunction {
         return Nullability.TRUE;
     }
 
-    @Override
-    protected boolean resolveDateTimeField(String dateTimeField) {
-        return Part.resolve(dateTimeField) != null;
-    }
-
-    @Override
-    protected List<String> findSimilarDateTimeFields(String dateTimeField) {
-        return Part.findSimilar(dateTimeField);
-    }
-
     @Override
     protected String scriptMethodName() {
         return "dateTrunc";
@@ -179,6 +168,21 @@ public class DateTrunc extends BinaryDateTimeFunction {
         return DateTruncProcessor.process(left().fold(), right().fold(), zoneId());
     }
 
+    @Override
+    protected Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId) {
+        return new DateTruncPipe(source(), this, left, right, zoneId);
+    }
+
+    @Override
+    protected boolean resolveDateTimeField(String dateTimeField) {
+        return Part.resolve(dateTimeField) != null;
+    }
+
+    @Override
+    protected List<String> findSimilarDateTimeFields(String dateTimeField) {
+        return Part.findSimilar(dateTimeField);
+    }
+
     @Override
     protected List<String> validDateTimeFieldValues() {
         return Part.VALID_VALUES;
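The `Part` lambdas above all follow the same pattern: compute the first unit of the enclosing period, then drop everything below it. A standalone sketch of the QUARTER case; the first-month arithmetic is reconstructed from the context shown above and is equivalent to `month - ((month - 1) % 3)`:

```java
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;

public class QuarterTruncDemo {
    static ZonedDateTime truncateToQuarter(ZonedDateTime dt) {
        // months 1-3 map to 1, 4-6 to 4, 7-9 to 7, 10-12 to 10
        int firstMonthOfQuarter = ((dt.getMonthValue() - 1) / 3) * 3 + 1;
        return dt.with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
                 .with(ChronoField.DAY_OF_MONTH, 1)
                 .toLocalDate().atStartOfDay(dt.getZone());
    }

    public static void main(String[] args) {
        // 1989-09-02 is in Q3, so truncation yields 1989-07-01T00:00Z
        System.out.println(truncateToQuarter(ZonedDateTime.parse("1989-09-02T10:30:00Z")));
    }
}
```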
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java
new file mode 100644
index 00000000000..3a51f25ab63
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipe.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
+import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
+
+import java.time.ZoneId;
+
+public class DateTruncPipe extends BinaryDateTimePipe {
+
+    public DateTruncPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
+        super(source, expression, left, right, zoneId);
+    }
+
+    @Override
+    protected NodeInfo<DateTruncPipe> info() {
+        return NodeInfo.create(this, DateTruncPipe::new, expression(), left(), right(), zoneId());
+    }
+
+    @Override
+    protected DateTruncPipe replaceChildren(Pipe left, Pipe right) {
+        return new DateTruncPipe(source(), expression(), left, right, zoneId());
+    }
+
+    @Override
+    protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
+        return new DateTruncProcessor(left, right, zoneId);
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java
index 4ddeaa501c0..447e2ca410c 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessor.java
@@ -34,36 +34,36 @@ public class DateTruncProcessor extends BinaryDateTimeProcessor {
     }
 
     @Override
-    protected Object doProcess(Object left, Object right) {
-        return process(left, right, zoneId());
+    protected Object doProcess(Object truncateTo, Object timestamp) {
+        return process(truncateTo, timestamp, zoneId());
     }
 
     /**
      * Used in Painless scripting
      */
-    public static Object process(Object source1, Object source2, ZoneId zoneId) {
-        if (source1 == null || source2 == null) {
+    public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) {
+        if (truncateTo == null || timestamp == null) {
             return null;
         }
-        if (source1 instanceof String == false) {
-            throw new SqlIllegalArgumentException("A string is required; received [{}]", source1);
+        if (truncateTo instanceof String == false) {
+            throw new SqlIllegalArgumentException("A string is required; received [{}]", truncateTo);
         }
 
-        Part truncateDateField = Part.resolve((String) source1);
+        Part truncateDateField = Part.resolve((String) truncateTo);
         if (truncateDateField == null) {
-            List<String> similar = Part.findSimilar((String) source1);
+            List<String> similar = Part.findSimilar((String) truncateTo);
             if (similar.isEmpty()) {
                 throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
-                    Part.values(), source1);
+                    Part.values(), truncateTo);
             } else {
                 throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for truncation; " +
-                    "did you mean {}?", source1, similar);
+                    "did you mean {}?", truncateTo, similar);
             }
         }
 
-        if (source2 instanceof ZonedDateTime == false) {
-            throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", source2);
+        if (timestamp instanceof ZonedDateTime == false) {
+            throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
         }
 
-        return truncateDateField.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
+        return truncateDateField.truncate(((ZonedDateTime) timestamp).withZoneSameInstant(zoneId));
     }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java
index 12355c0baa0..0720706de22 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java
@@ -5,20 +5,20 @@
  */
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
 
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
-import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2;
+import org.elasticsearch.xpack.sql.tree.Source;
 
 import java.time.ZoneId;
-import java.util.concurrent.TimeUnit;
 
 /**
  * Extract the year from a datetime.
  */
 public class Year extends DateTimeHistogramFunction {
-
-    private static long YEAR_IN_MILLIS = TimeUnit.DAYS.toMillis(1) * 365L;
+
+    public static String YEAR_INTERVAL = DateHistogramInterval.YEAR.toString();
 
     public Year(Source source, Expression field, ZoneId zoneId) {
         super(source, field, zoneId, DateTimeExtractor.YEAR);
@@ -45,7 +45,7 @@ public class Year extends DateTimeHistogramFunction {
     }
 
     @Override
-    public long interval() {
-        return YEAR_IN_MILLIS;
+    public String calendarInterval() {
+        return YEAR_INTERVAL;
     }
 }
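With the two hooks on `DateTimeHistogramFunction`, a subclass opts into calendar bucketing by overriding only `calendarInterval()`, as `Year` does above with `"1y"`. A sketch of how a consumer distinguishes the cases (the decision logic mirrors the translator change further below):

```java
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class IntervalHookDemo {
    // A non-null calendarInterval() wins; otherwise fall back to the
    // fixed interval expressed in milliseconds.
    static String describe(String calendarInterval, long fixedIntervalMillis) {
        return calendarInterval != null
            ? "calendar_interval: " + calendarInterval
            : "fixed_interval: " + fixedIntervalMillis + "ms";
    }

    public static void main(String[] args) {
        // Year overrides only calendarInterval(), yielding "1y"
        System.out.println(describe(DateHistogramInterval.YEAR.toString(), -1));
    }
}
```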
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java
index 5b1076592d8..b7b559f1b86 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java
@@ -11,7 +11,6 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Bina
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 
@@ -41,7 +40,7 @@ abstract class DateTimeArithmeticOperation extends ArithmeticOperation {
             return TypeResolution.TYPE_RESOLVED;
         }
         // 2. 3. 4. intervals
-        if ((DataTypes.isInterval(l) || DataTypes.isInterval(r))) {
+        if (l.isInterval() || r.isInterval()) {
             if (DataTypeConversion.commonType(l, r) == null) {
                 return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r));
             } else {
@@ -57,7 +56,7 @@ abstract class DateTimeArithmeticOperation extends ArithmeticOperation {
         DataType l = left().dataType();
         DataType r = right().dataType();
 
-        if (!(r.isDateOrTimeBased() || DataTypes.isInterval(r))|| !(l.isDateOrTimeBased() || DataTypes.isInterval(l))) {
+        if (!(r.isDateOrTimeBased() || r.isInterval())|| !(l.isDateOrTimeBased() || l.isInterval())) {
             return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r));
         }
         return TypeResolution.TYPE_RESOLVED;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java
index e3fa7ac1031..9c12a246876 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java
@@ -7,10 +7,9 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic;
 
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
-import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 
@@ -39,10 +38,10 @@ public class Mul extends ArithmeticOperation {
             return TypeResolution.TYPE_RESOLVED;
         }
 
-        if (DataTypes.isInterval(l) && r.isInteger()) {
+        if (l.isInterval() && r.isInteger()) {
             dataType = l;
             return TypeResolution.TYPE_RESOLVED;
-        } else if (DataTypes.isInterval(r) && l.isInteger()) {
+        } else if (r.isInterval() && l.isInteger()) {
             dataType = r;
             return TypeResolution.TYPE_RESOLVED;
         }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java
index a47b9cc9731..affb02a4009 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java
@@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.Source;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 
@@ -38,7 +37,7 @@ public class Sub extends DateTimeArithmeticOperation {
         if (resolution.unresolved()) {
             return resolution;
         }
-        if ((right().dataType().isDateOrTimeBased()) && DataTypes.isInterval(left().dataType())) {
+        if ((right().dataType().isDateOrTimeBased()) && left().dataType().isInterval()) {
             return new TypeResolution(format(null, "Cannot subtract a {}[{}] from an interval[{}]; do you mean the reverse?",
                 right().dataType().typeName, right().source().text(), left().source().text()));
         }
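The `Sub` resolution shown above keeps rejecting `interval - datetime` while allowing the reverse order; reduced to plain booleans, the guard looks like this sketch:

```java
public class SubGuardDemo {
    static String resolve(boolean leftIsInterval, boolean rightIsDateOrTimeBased) {
        // interval - datetime is rejected, datetime - interval resolves
        if (rightIsDateOrTimeBased && leftIsInterval) {
            return "Cannot subtract a datetime from an interval; do you mean the reverse?";
        }
        return "TYPE_RESOLVED";
    }

    public static void main(String[] args) {
        System.out.println(resolve(true, true));   // INTERVAL 1 YEAR - birth_date
        System.out.println(resolve(false, false)); // birth_date - INTERVAL 1 YEAR
    }
}
```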
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
index d46b2a3da01..a3f1f385346 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
@@ -41,9 +41,11 @@ import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year;
 import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape;
 import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance;
 import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth;
 import org.elasticsearch.xpack.sql.expression.literal.Intervals;
 import org.elasticsearch.xpack.sql.expression.predicate.Range;
 import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate;
@@ -109,6 +111,7 @@ import org.elasticsearch.xpack.sql.util.DateUtils;
 import org.elasticsearch.xpack.sql.util.ReflectionUtils;
 
 import java.time.OffsetTime;
+import java.time.Period;
 import java.time.ZonedDateTime;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
@@ -279,7 +282,11 @@ final class QueryTranslator {
                 // dates are handled differently because of date histograms
                 if (exp instanceof DateTimeHistogramFunction) {
                     DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) exp;
-                    key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.zoneId());
+                    if (dthf.calendarInterval() != null) {
+                        key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.calendarInterval(), dthf.zoneId());
+                    } else {
+                        key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.fixedInterval(), dthf.zoneId());
+                    }
                 }
                 // all other scalar functions become a script
                 else if (exp instanceof ScalarFunction) {
@@ -294,19 +301,33 @@ final class QueryTranslator {
 
                 // date histogram
                 if (h.dataType().isDateBased()) {
-                    long intervalAsMillis = Intervals.inMillis(h.interval());
-
-                    // When the histogram in SQL is applied on DATE type instead of DATETIME, the interval
-                    // specified is truncated to the multiple of a day. If the interval specified is less
-                    // than 1 day, then the interval used will be `INTERVAL '1' DAY`.
-                    if (h.dataType() == DATE) {
-                        intervalAsMillis = DateUtils.minDayInterval(intervalAsMillis);
-                    }
-
-                    if (field instanceof FieldAttribute) {
-                        key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.zoneId());
-                    } else if (field instanceof Function) {
-                        key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.zoneId());
+                    Object value = h.interval().value();
+                    if (value instanceof IntervalYearMonth
+                            && ((IntervalYearMonth) value).interval().equals(Period.of(1, 0, 0))) {
+                        String calendarInterval = Year.YEAR_INTERVAL;
+
+                        // When the histogram is `INTERVAL '1' YEAR`, the interval used in the ES date_histogram will be
+                        // a calendar_interval with value "1y". All other intervals will be fixed_intervals expressed in ms.
+                        if (field instanceof FieldAttribute) {
+                            key = new GroupByDateHistogram(aggId, nameOf(field), calendarInterval, h.zoneId());
+                        } else if (field instanceof Function) {
+                            key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), calendarInterval, h.zoneId());
+                        }
+                    } else {
+                        long intervalAsMillis = Intervals.inMillis(h.interval());
+
+                        // When the histogram in SQL is applied on DATE type instead of DATETIME, the interval
+                        // specified is truncated to the multiple of a day. If the interval specified is less
+                        // than 1 day, then the interval used will be `INTERVAL '1' DAY`.
+                        if (h.dataType() == DATE) {
+                            intervalAsMillis = DateUtils.minDayInterval(intervalAsMillis);
+                        }
+
+                        if (field instanceof FieldAttribute) {
+                            key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.zoneId());
+                        } else if (field instanceof Function) {
+                            key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.zoneId());
+                        }
                     }
                 }
                 // numeric histogram
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
index f66278fc2c6..807df41387a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.querydsl.agg;
 import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
 
@@ -19,52 +20,65 @@ import java.util.Objects;
  */
 public class GroupByDateHistogram extends GroupByKey {
 
-    private final long interval;
+    private final long fixedInterval;
+    private final String calendarInterval;
     private final ZoneId zoneId;
 
-    public GroupByDateHistogram(String id, String fieldName, long interval, ZoneId zoneId) {
-        this(id, fieldName, null, null, interval, zoneId);
+    public GroupByDateHistogram(String id, String fieldName, long fixedInterval, ZoneId zoneId) {
+        this(id, fieldName, null, null, fixedInterval, null, zoneId);
     }
 
-    public GroupByDateHistogram(String id, ScriptTemplate script, long interval, ZoneId zoneId) {
-        this(id, null, script, null, interval, zoneId);
+    public GroupByDateHistogram(String id, ScriptTemplate script, long fixedInterval, ZoneId zoneId) {
+        this(id, null, script, null, fixedInterval, null, zoneId);
+    }
+
+    public GroupByDateHistogram(String id, String fieldName, String calendarInterval, ZoneId zoneId) {
+        this(id, fieldName, null, null, -1L, calendarInterval, zoneId);
+    }
+
+    public GroupByDateHistogram(String id, ScriptTemplate script, String calendarInterval, ZoneId zoneId) {
+        this(id, null, script, null, -1L, calendarInterval, zoneId);
     }
 
-    private GroupByDateHistogram(String id, String fieldName, ScriptTemplate script, Direction direction, long interval,
-                                 ZoneId zoneId) {
+    private GroupByDateHistogram(String id, String fieldName, ScriptTemplate script, Direction direction, long fixedInterval,
+                                 String calendarInterval, ZoneId zoneId) {
         super(id, fieldName, script, direction);
-        this.interval = interval;
+        if (fixedInterval <= 0 && (calendarInterval == null || calendarInterval.trim().isEmpty())) {
+            throw new SqlIllegalArgumentException("Either fixed interval or calendar interval needs to be specified");
+        }
+        this.fixedInterval = fixedInterval;
+        this.calendarInterval = calendarInterval;
        this.zoneId = zoneId;
-
    }
 
     // For testing
-    public long interval() {
-        return interval;
+    public long fixedInterval() {
+        return fixedInterval;
     }
 
     @Override
     protected CompositeValuesSourceBuilder<?> createSourceBuilder() {
-        return new DateHistogramValuesSourceBuilder(id())
-            .fixedInterval(new DateHistogramInterval(interval + "ms"))
-            .timeZone(zoneId);
+        DateHistogramValuesSourceBuilder builder = new DateHistogramValuesSourceBuilder(id()).timeZone(zoneId);
+        return calendarInterval != null ? builder.calendarInterval(new DateHistogramInterval(calendarInterval))
+                : builder.fixedInterval(new DateHistogramInterval(fixedInterval + "ms"));
     }
 
     @Override
     protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) {
-        return new GroupByDateHistogram(id, fieldName, script, direction, interval, zoneId);
+        return new GroupByDateHistogram(id, fieldName, script, direction, fixedInterval, calendarInterval, zoneId);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), interval, zoneId);
+        return Objects.hash(super.hashCode(), fixedInterval, calendarInterval, zoneId);
     }
 
     @Override
     public boolean equals(Object obj) {
         if (super.equals(obj)) {
             GroupByDateHistogram other = (GroupByDateHistogram) obj;
-            return Objects.equals(interval, other.interval)
+            return Objects.equals(fixedInterval, other.fixedInterval)
+                    && Objects.equals(calendarInterval, other.calendarInterval)
                     && Objects.equals(zoneId, other.zoneId);
         }
         return false;
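The overloaded constructors above select the histogram mode by argument type, and the private constructor rejects the case where neither kind of interval is usable. Assuming the class above is on the classpath, a usage sketch; the id and field names are illustrative:

```java
import java.time.ZoneId;

import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram;

public class GroupByDemo {
    public static void main(String[] args) {
        ZoneId utc = ZoneId.of("Z");
        // calendar-interval flavor, as produced for INTERVAL '1' YEAR ("1y")
        GroupByDateHistogram byYear = new GroupByDateHistogram("h", "birth_date", "1y", utc);
        // fixed-interval flavor in milliseconds (here: one day)
        GroupByDateHistogram byDay = new GroupByDateHistogram("h", "birth_date", 86_400_000L, utc);
        System.out.println(byYear + " / " + byDay);
    }
}
```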
INTERVAL_DAY_TO_HOUR.ordinal() && ordinal <= INTERVAL_MINUTE_TO_SECOND.ordinal()); + } + // data type extract-able from _source or from docvalue_fields public boolean isFromDocValuesOnly() { return this == KEYWORD // because of ignore_above. Extracting this from _source wouldn't make sense if it wasn't indexed at all. diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index ebae0a516a4..a9b836da135 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -87,12 +87,12 @@ public abstract class DataTypeConversion { // interval and dates if (left == DATE) { - if (DataTypes.isInterval(right)) { + if (right.isInterval()) { return left; } } if (right == DATE) { - if (DataTypes.isInterval(left)) { + if (left.isInterval()) { return right; } } @@ -100,7 +100,7 @@ public abstract class DataTypeConversion { if (right == DATE) { return DATETIME; } - if (DataTypes.isInterval(right)) { + if (right.isInterval()) { return left; } } @@ -108,7 +108,7 @@ public abstract class DataTypeConversion { if (left == DATE) { return DATETIME; } - if (DataTypes.isInterval(left)) { + if (left.isInterval()) { return right; } } @@ -116,7 +116,7 @@ public abstract class DataTypeConversion { if (right == DATE || right == TIME) { return left; } - if (DataTypes.isInterval(right)) { + if (right.isInterval()) { return left; } } @@ -124,24 +124,24 @@ public abstract class DataTypeConversion { if (left == DATE || left == TIME) { return right; } - if (DataTypes.isInterval(left)) { + if (left.isInterval()) { return right; } } // Interval * integer is a valid operation - if (DataTypes.isInterval(left)) { + if (left.isInterval()) { if (right.isInteger()) { return left; } } - if (DataTypes.isInterval(right)) { + if (right.isInterval()) { if (left.isInteger()) { return right; } } - if (DataTypes.isInterval(left)) { + if (left.isInterval()) { // intervals widening - if (DataTypes.isInterval(right)) { + if (right.isInterval()) { // null returned for incompatible intervals return DataTypes.compatibleInterval(left, right); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index 3f985ae4e3b..5d4691e70d5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -18,12 +18,6 @@ import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_MINUTE_TO_SECOND; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_MONTH; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_SECOND; -import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_YEAR; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_YEAR_TO_MONTH; import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD; 
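Note on the interval predicates moved onto DataType above: they keep the original ordinal-range trick, since the INTERVAL_* constants are declared contiguously, a pair of ordinal comparisons replaces an EnumSet that (per the comment removed from DataTypes below) would have required a large initialization block for the day-time case. A minimal, compilable sketch of the same idea, using an illustrative enum rather than the real DataType:

enum IntervalKind {
    // declaration order matters: related constants form contiguous runs of ordinals
    INTERVAL_YEAR, INTERVAL_MONTH, INTERVAL_DAY, INTERVAL_HOUR, INTERVAL_MINUTE, INTERVAL_SECOND,
    INTERVAL_YEAR_TO_MONTH, INTERVAL_DAY_TO_HOUR, INTERVAL_MINUTE_TO_SECOND;

    boolean isYearMonth() {
        return this == INTERVAL_YEAR || this == INTERVAL_MONTH || this == INTERVAL_YEAR_TO_MONTH;
    }

    boolean isDayTime() {
        // two contiguous ordinal runs: DAY..SECOND and DAY_TO_HOUR..MINUTE_TO_SECOND
        int o = ordinal();
        return (o >= INTERVAL_DAY.ordinal() && o <= INTERVAL_SECOND.ordinal())
                || (o >= INTERVAL_DAY_TO_HOUR.ordinal() && o <= INTERVAL_MINUTE_TO_SECOND.ordinal());
    }
}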
import static org.elasticsearch.xpack.sql.type.DataType.LONG; @@ -88,18 +82,6 @@ public final class DataTypes { throw new SqlIllegalArgumentException("No idea what's the DataType for {}", value.getClass()); } - - // - // Interval utilities - // - // some of the methods below could have used an EnumSet however isDayTime would have required a large initialization block - // for this reason, these use the ordinal directly (and thus avoid the type check in EnumSet) - - public static boolean isInterval(DataType type) { - int ordinal = type.ordinal(); - return ordinal >= INTERVAL_YEAR.ordinal() && ordinal <= INTERVAL_MINUTE_TO_SECOND.ordinal(); - } - // return the compatible interval between the two - it is assumed the types are intervals // YEAR and MONTH -> YEAR_TO_MONTH // DAY... SECOND -> DAY_TIME @@ -108,11 +90,11 @@ public final class DataTypes { if (left == right) { return left; } - if (isYearMonthInterval(left) && isYearMonthInterval(right)) { + if (left.isYearMonthInterval() && right.isYearMonthInterval()) { // no need to look at YEAR/YEAR or MONTH/MONTH as these are equal and already handled return INTERVAL_YEAR_TO_MONTH; } - if (isDayTimeInterval(left) && isDayTimeInterval(right)) { + if (left.isDayTimeInterval() && right.isDayTimeInterval()) { // to avoid specifying the combinations, extract the leading and trailing unit from the name // D > H > S > M which is also the alphabetical order String lName = left.name().substring(9); @@ -141,16 +123,6 @@ public final class DataTypes { return null; } - private static boolean isYearMonthInterval(DataType type) { - return type == INTERVAL_YEAR || type == INTERVAL_MONTH || type == INTERVAL_YEAR_TO_MONTH; - } - - private static boolean isDayTimeInterval(DataType type) { - int ordinal = type.ordinal(); - return (ordinal >= INTERVAL_DAY.ordinal() && ordinal <= INTERVAL_SECOND.ordinal()) - || (ordinal >= INTERVAL_DAY_TO_HOUR.ordinal() && ordinal <= INTERVAL_MINUTE_TO_SECOND.ordinal()); - } - private static String intervalUnit(char unitChar) { switch (unitChar) { case 'D': @@ -240,9 +212,11 @@ public final class DataTypes { return true; } else { return - (left == DataType.NULL || right == DataType.NULL) || - (left.isString() && right.isString()) || - (left.isNumeric() && right.isNumeric()); + (left == DataType.NULL || right == DataType.NULL) + || (left.isString() && right.isString()) + || (left.isNumeric() && right.isNumeric()) + || (left.isDateBased() && right.isDateBased()) + || (left.isInterval() && right.isInterval() && compatibleInterval(left, right) != null); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 0f96850e09a..8dde63539f5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -80,7 +80,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { private LogicalPlan incompatibleAccept(String sql) { return accept(incompatible(), sql); } - + public void testMissingIndex() { assertEquals("1:17: Unknown index [missing]", error(IndexResolution.notFound("missing"), "SELECT foo FROM missing")); } @@ -96,11 +96,11 @@ public class VerifierErrorMessagesTests extends ESTestCase { public void testMissingColumnWithWildcard() { assertEquals("1:8: Unknown column 
[xxx]", error("SELECT xxx.* FROM test")); } - + public void testMisspelledColumnWithWildcard() { assertEquals("1:8: Unknown column [tex], did you mean [text]?", error("SELECT tex.* FROM test")); } - + public void testColumnWithNoSubFields() { assertEquals("1:8: Cannot determine columns for [text.*]", error("SELECT text.* FROM test")); } @@ -131,14 +131,14 @@ public class VerifierErrorMessagesTests extends ESTestCase { "line 1:22: Unknown column [c]\n" + "line 1:25: Unknown column [tex], did you mean [text]?", error("SELECT bool, a, b.*, c, tex.* FROM test")); } - + public void testMultipleColumnsWithWildcard2() { assertEquals("1:8: Unknown column [tex], did you mean [text]?\n" + "line 1:21: Unknown column [a]\n" + "line 1:24: Unknown column [dat], did you mean [date]?\n" + "line 1:31: Unknown column [c]", error("SELECT tex.*, bool, a, dat.*, c FROM test")); } - + public void testMultipleColumnsWithWildcard3() { assertEquals("1:8: Unknown column [ate], did you mean [date]?\n" + "line 1:21: Unknown column [keyw], did you mean [keyword]?\n" + @@ -210,7 +210,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { "type [keyword]", error("SELECT DATE_TRUNC(keyword, keyword) FROM test")); assertEquals("1:8: first argument of [DATE_TRUNC('invalid', keyword)] must be one of [MILLENNIUM, CENTURY, DECADE, " + "" + "YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " + - "or their aliases, found value ['invalid']", + "or their aliases; found value ['invalid']", error("SELECT DATE_TRUNC('invalid', keyword) FROM test")); assertEquals("1:8: Unknown value ['millenioum'] for first argument of [DATE_TRUNC('millenioum', keyword)]; " + "did you mean [millennium, millennia]?", @@ -237,7 +237,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { "type [keyword]", error("SELECT DATE_PART(keyword, keyword) FROM test")); assertEquals("1:8: first argument of [DATE_PART('invalid', keyword)] must be one of [YEAR, QUARTER, MONTH, DAYOFYEAR, " + "DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND, TZOFFSET] " + - "or their aliases, found value ['invalid']", + "or their aliases; found value ['invalid']", error("SELECT DATE_PART('invalid', keyword) FROM test")); assertEquals("1:8: Unknown value ['tzofset'] for first argument of [DATE_PART('tzofset', keyword)]; " + "did you mean [tzoffset]?", @@ -616,13 +616,13 @@ public class VerifierErrorMessagesTests extends ESTestCase { "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", error("SELECT * FROM test WHERE text LIKE 'foo'")); } - + public void testInvalidTypeForRLikeMatch() { assertEquals("1:26: [text RLIKE 'foo'] cannot operate on field of data type [text]: " + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", error("SELECT * FROM test WHERE text RLIKE 'foo'")); } - + public void testAllowCorrectFieldsInIncompatibleMappings() { assertNotNull(incompatibleAccept("SELECT languages FROM \"*\"")); } @@ -746,32 +746,32 @@ public class VerifierErrorMessagesTests extends ESTestCase { assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test")); } - + public void testHistogramNotInGroupingWithCount() { assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h, COUNT(*) FROM test")); } - + public void 
testHistogramNotInGroupingWithMaxFirst() { assertEquals("1:19: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT MAX(date), HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test")); } - + public void testHistogramWithoutAliasNotInGrouping() { assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) FROM test")); } - + public void testTwoHistogramsNotInGrouping() { assertEquals("1:48: [HISTOGRAM(date, INTERVAL 1 DAY)] needs to be part of the grouping", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h, HISTOGRAM(date, INTERVAL 1 DAY) FROM test GROUP BY h")); } - + public void testHistogramNotInGrouping_WithGroupByField() { assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) FROM test GROUP BY date")); } - + public void testScalarOfHistogramNotInGrouping() { assertEquals("1:14: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping", error("SELECT MONTH(HISTOGRAM(date, INTERVAL 1 MONTH)) FROM test")); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipeTests.java new file mode 100644 index 00000000000..e6a5b3c9d14 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartPipeTests.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; + +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; + +public class DatePartPipeTests extends AbstractNodeTestCase { + + @Override + protected DatePartPipe randomInstance() { + return randomDatePartPipe(); + } + + private Expression randomDatePartPipeExpression() { + return randomDatePartPipe().expression(); + } + + public static DatePartPipe randomDatePartPipe() { + return (DatePartPipe) new DatePart( + randomSource(), + randomStringLiteral(), + randomDatetimeLiteral(), + randomZone()) + .makePipe(); + } + + @Override + public void testTransform() { + // test transforming only the properties (source, expression), + // skipping the children (the two parameters of the binary function) which are tested separately + DatePartPipe b1 = randomInstance(); + + Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDatePartPipeExpression); + DatePartPipe newB = new DatePartPipe( + b1.source(), + newExpression, + b1.left(), + b1.right(), + b1.zoneId()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); + + DatePartPipe b2 = randomInstance(); + Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource); + newB = new DatePartPipe( + newLoc, + b2.expression(), + b2.left(), + b2.right(), + b2.zoneId()); + assertEquals(newB, + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? 
newLoc : v, Source.class)); + } + + @Override + public void testReplaceChildren() { + DatePartPipe b = randomInstance(); + Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral))); + Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral))); + ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone); + DatePartPipe newB = new DatePartPipe( b.source(), b.expression(), b.left(), b.right(), newZoneId); + BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); + + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), b.right()); + + transformed = newB.replaceChildren(b.left(), newRight); + assertEquals(transformed.left(), b.left()); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + + transformed = newB.replaceChildren(newLeft, newRight); + assertEquals(transformed.left(), newLeft); + assertEquals(transformed.source(), b.source()); + assertEquals(transformed.expression(), b.expression()); + assertEquals(transformed.right(), newRight); + } + + @Override + protected DatePartPipe mutate(DatePartPipe instance) { + List> randoms = new ArrayList<>(); + randoms.add(f -> new DatePartPipe(f.source(), f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + f.right(), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DatePartPipe(f.source(), f.expression(), + f.left(), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DatePartPipe(f.source(), f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + + return randomFrom(randoms).apply(instance); + } + + @Override + protected DatePartPipe copy(DatePartPipe instance) { + return new DatePartPipe( + instance.source(), + instance.expression(), + instance.left(), + instance.right(), + instance.zoneId()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java index 314f57c54b4..fca8c27cc87 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessorTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; -import java.time.ZonedDateTime; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; @@ -27,7 +26,7 @@ public class DatePartProcessorTests extends AbstractSqlWireSerializingTestCase new DatePart(Source.EMPTY, l("dayfyear"), 
randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?", - siae.getMessage()); + siae.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 45bb3752123..9df9bc995c7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.Clock; +import java.time.Duration; import java.time.OffsetTime; import java.time.ZoneOffset; import java.time.ZonedDateTime; @@ -39,4 +41,9 @@ public class DateTimeTestUtils { public static OffsetTime time(int hour, int minute, int second, int nano) { return OffsetTime.of(hour, minute, second, nano, ZoneOffset.UTC); } + + static ZonedDateTime nowWithMillisResolution() { + Clock millisResolutionClock = Clock.tick(Clock.systemUTC(), Duration.ofMillis(1)); + return ZonedDateTime.now(millisResolutionClock); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java similarity index 52% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipeTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java index ff02245df88..08836f98afc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BinaryDateTimePipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncPipeTests.java @@ -22,13 +22,14 @@ import java.util.Objects; import java.util.function.Function; import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; -public class BinaryDateTimePipeTests extends AbstractNodeTestCase { +public class DateTruncPipeTests extends AbstractNodeTestCase { @Override - protected BinaryDateTimePipe randomInstance() { + protected DateTruncPipe randomInstance() { return randomDateTruncPipe(); } @@ -36,52 +37,49 @@ public class BinaryDateTimePipeTests extends AbstractNodeTestCase Objects.equals(v, b1.expression()) ? 
newExpression : v, Expression.class)); - BinaryDateTimePipe b2 = randomInstance(); + DateTruncPipe b2 = randomInstance(); Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource); - newB = new BinaryDateTimePipe( - newLoc, - b2.expression(), - b2.left(), - b2.right(), - b2.zoneId(), - b2.operation()); + newB = new DateTruncPipe( + newLoc, + b2.expression(), + b2.left(), + b2.right(), + b2.zoneId()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); } @Override public void testReplaceChildren() { - BinaryDateTimePipe b = randomInstance(); + DateTruncPipe b = randomInstance(); Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral))); Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral))); ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone); - BinaryDateTimePipe newB = new BinaryDateTimePipe( - b.source(), b.expression(), b.left(), b.right(), newZoneId, randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values())); + DateTruncPipe newB = new DateTruncPipe( b.source(), b.expression(), b.left(), b.right(), newZoneId); BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); assertEquals(transformed.left(), newLeft); @@ -103,37 +101,31 @@ public class BinaryDateTimePipeTests extends AbstractNodeTestCase> randoms = new ArrayList<>(); - randoms.add(f -> new BinaryDateTimePipe(f.source(), - f.expression(), - pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), - f.right(), - randomValueOtherThan(f.zoneId(), ESTestCase::randomZone), - randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values()))); - randoms.add(f -> new BinaryDateTimePipe(f.source(), - f.expression(), - f.left(), - pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), - randomValueOtherThan(f.zoneId(), ESTestCase::randomZone), - randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values()))); - randoms.add(f -> new BinaryDateTimePipe(f.source(), - f.expression(), - pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), - pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), - randomValueOtherThan(f.zoneId(), ESTestCase::randomZone), - randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values()))); + protected DateTruncPipe mutate(DateTruncPipe instance) { + List> randoms = new ArrayList<>(); + randoms.add(f -> new DateTruncPipe(f.source(), f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + f.right(), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DateTruncPipe(f.source(), f.expression(), + f.left(), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); + randoms.add(f -> new DateTruncPipe(f.source(), f.expression(), + pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))), + pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))), + randomValueOtherThan(f.zoneId(), ESTestCase::randomZone))); return randomFrom(randoms).apply(instance); } @Override - protected 
BinaryDateTimePipe copy(BinaryDateTimePipe instance) { - return new BinaryDateTimePipe(instance.source(), - instance.expression(), - instance.left(), - instance.right(), - instance.zoneId(), - instance.operation()); + protected DateTruncPipe copy(DateTruncPipe instance) { + return new DateTruncPipe( + instance.source(), + instance.expression(), + instance.left(), + instance.right(), + instance.zoneId()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java index de120371225..cbc76419c58 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTruncProcessorTests.java @@ -29,7 +29,7 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase< public static DateTruncProcessor randomDateTruncProcessor() { return new DateTruncProcessor( new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)), - new ConstantProcessor(ZonedDateTime.now()), + new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()), randomZone()); } @@ -52,13 +52,13 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase< protected DateTruncProcessor mutateInstance(DateTruncProcessor instance) { return new DateTruncProcessor( new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)), - new ConstantProcessor(ZonedDateTime.now()), + new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()), randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone)); } public void testInvalidInputs() { SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, - () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); + () -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); assertEquals("A string is required; received [5]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, @@ -68,13 +68,13 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase< siae = expectThrows(SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); assertEquals("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " + - "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", + "SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, () -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null)); assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?", - siae.getMessage()); + siae.getMessage()); } public void testWithNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index b353724acd0..d46afb3e157 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -915,7 +915,7 @@ public class QueryTranslatorTests extends ESTestCase { assertEquals(DataType.INTEGER, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," - + "\"fixed_interval\":\"31536000000ms\",\"time_zone\":\"Z\"}}}]}}}")); + + "\"calendar_interval\":\"1y\",\"time_zone\":\"Z\"}}}]}}}")); } public void testGroupByHistogramWithDate() { @@ -940,7 +940,7 @@ public class QueryTranslatorTests extends ESTestCase { EsQueryExec eqe = (EsQueryExec) p; assertEquals(1, eqe.queryContainer().aggs().groups().size()); assertEquals(GroupByDateHistogram.class, eqe.queryContainer().aggs().groups().get(0).getClass()); - assertEquals(86400000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).interval()); + assertEquals(86400000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).fixedInterval()); } public void testGroupByHistogramWithDateTruncateIntervalToDayMultiples() { @@ -951,7 +951,7 @@ public class QueryTranslatorTests extends ESTestCase { EsQueryExec eqe = (EsQueryExec) p; assertEquals(1, eqe.queryContainer().aggs().groups().size()); assertEquals(GroupByDateHistogram.class, eqe.queryContainer().aggs().groups().get(0).getClass()); - assertEquals(172800000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).interval()); + assertEquals(172800000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).fixedInterval()); } { PhysicalPlan p = optimizeAndPlan("SELECT MAX(int) FROM test GROUP BY " + @@ -960,7 +960,7 @@ public class QueryTranslatorTests extends ESTestCase { EsQueryExec eqe = (EsQueryExec) p; assertEquals(1, eqe.queryContainer().aggs().groups().size()); assertEquals(GroupByDateHistogram.class, eqe.queryContainer().aggs().groups().get(0).getClass()); - assertEquals(259200000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).interval()); + assertEquals(259200000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).fixedInterval()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index ea5f9efc0be..76c629ad1dc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -684,6 +684,6 @@ public class DataTypeConversionTests extends ESTestCase { } private DataType randomInterval() { - return randomFrom(Stream.of(DataType.values()).filter(DataTypes::isInterval).collect(Collectors.toList())); + return randomFrom(Stream.of(DataType.values()).filter(DataType::isInterval).collect(Collectors.toList())); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java index a789324e0b4..5b0111b5998 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java @@ -32,7 
+32,6 @@ import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_YEAR_TO_MONTH; import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD; import static org.elasticsearch.xpack.sql.type.DataType.LONG; import static org.elasticsearch.xpack.sql.type.DataTypes.compatibleInterval; -import static org.elasticsearch.xpack.sql.type.DataTypes.isInterval; import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDataType; import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDateTimeSub; import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlMaximumScale; @@ -77,7 +76,7 @@ public class DataTypesTests extends ESTestCase { // type checks public void testIsInterval() throws Exception { for (DataType dataType : EnumSet.range(INTERVAL_YEAR, INTERVAL_MINUTE_TO_SECOND)) { - assertTrue(isInterval(dataType)); + assertTrue(dataType.isInterval()); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml index 234e5627853..c7130faf277 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 31 } + - length: { "cluster" : 33 } - length: { "index" : 17 } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java index 3ef271fb42e..8c80616fcd7 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java @@ -53,8 +53,15 @@ public class TransformGetAndGetStatsIT extends TransformRestTestCase { createReviewsIndex(); indicesCreated = true; - setupUser(TEST_USER_NAME, Collections.singletonList("data_frame_transforms_user")); - setupUser(TEST_ADMIN_USER_NAME, Collections.singletonList("data_frame_transforms_admin")); + + // at random test the old deprecated roles, to be removed in 9.0.0 + if (randomBoolean()) { + setupUser(TEST_USER_NAME, Collections.singletonList("transform_user")); + setupUser(TEST_ADMIN_USER_NAME, Collections.singletonList("transform_admin")); + } else { + setupUser(TEST_USER_NAME, Collections.singletonList("data_frame_transforms_user")); + setupUser(TEST_ADMIN_USER_NAME, Collections.singletonList("data_frame_transforms_admin")); + } } @After diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 72e9a621d54..c47a191b19f 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java 
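The transform integration suites in this patch all apply the same pattern: flip a coin and provision the test user either with the deprecated data_frame_transforms_* roles or with the new transform_* ones, so every CI run exercises one of the two aliases without doubling the test matrix. A sketch of the pattern, assuming the helpers (setupUser, randomBoolean, TEST_USER_NAME, DATA_ACCESS_ROLE) provided by the surrounding test classes:

// Randomly exercise the deprecated role alias or its replacement;
// the deprecated names are slated for removal in 9.0.0.
String adminRole = randomBoolean() ? "transform_admin" : "data_frame_transforms_admin";
setupUser(TEST_USER_NAME, Arrays.asList(adminRole, DATA_ACCESS_ROLE));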
@@ -50,7 +50,13 @@ public class TransformPivotRestIT extends TransformRestTestCase { createReviewsIndex(); indicesCreated = true; setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME); - setupUser(TEST_USER_NAME, Arrays.asList("data_frame_transforms_admin", DATA_ACCESS_ROLE)); + + // at random test the old deprecated roles, to be removed in 9.0.0 + if (randomBoolean()) { + setupUser(TEST_USER_NAME, Arrays.asList("transform_admin", DATA_ACCESS_ROLE)); + } else { + setupUser(TEST_USER_NAME, Arrays.asList("data_frame_transforms_admin", DATA_ACCESS_ROLE)); + } } public void testSimplePivot() throws Exception { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java index 44d340cb2cc..86775467860 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.transform.transforms.pivot.PivotConfig; import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; @@ -35,21 +36,25 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; +import org.junit.Before; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.SortedSet; +import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_INTERNAL_INDEX_PREFIX; +import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED; +import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_NOTIFICATIONS_INDEX_PREFIX; +import static org.elasticsearch.xpack.test.rest.XPackRestTestConstants.TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -72,14 +77,38 @@ public class TransformSurvivesUpgradeIT extends AbstractUpgradeTestCase { .map(TimeValue::timeValueMinutes) .collect(Collectors.toList()); - @Override - protected Collection templatesToWaitFor() { - if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_4_0)) { - return Stream.concat(XPackRestTestConstants.DATA_FRAME_TEMPLATES.stream(), - super.templatesToWaitFor().stream()).collect(Collectors.toSet()); - } else { - return Collections.emptySet(); + @Before + public void waitForTemplates() throws Exception { + // no transform before 7.2 + if 
(UPGRADE_FROM_VERSION.before(Version.V_7_2_0)) { + return; } + + assertBusy(() -> { + final Request catRequest = new Request("GET", "_cat/templates?h=n&s=n"); + final Response catResponse = adminClient().performRequest(catRequest); + + final SortedSet templates = new TreeSet<>(Streams.readAllLines(catResponse.getEntity().getContent())); + + // match templates, independent of the version, at least 2 should exist + SortedSet internalDeprecated = templates.tailSet(TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED); + SortedSet internal = templates.tailSet(TRANSFORM_INTERNAL_INDEX_PREFIX); + SortedSet notificationsDeprecated = templates + .tailSet(TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED); + SortedSet notifications = templates.tailSet(TRANSFORM_NOTIFICATIONS_INDEX_PREFIX); + + int foundTemplates = 0; + foundTemplates += internalDeprecated.isEmpty() ? 0 + : internalDeprecated.first().startsWith(TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED) ? 1 : 0; + foundTemplates += internal.isEmpty() ? 0 : internal.first().startsWith(TRANSFORM_INTERNAL_INDEX_PREFIX) ? 1 : 0; + foundTemplates += notificationsDeprecated.isEmpty() ? 0 + : notificationsDeprecated.first().startsWith(TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED) ? 1 : 0; + foundTemplates += notifications.isEmpty() ? 0 : notifications.first().startsWith(TRANSFORM_NOTIFICATIONS_INDEX_PREFIX) ? 1 : 0; + + if (foundTemplates < 2) { + fail("Transform index templates not found. The templates that exist are: " + templates); + } + }); } protected static void waitForPendingDataFrameTasks() throws Exception { diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java index d09d09bbc40..acfeed8b91e 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java @@ -38,12 +38,11 @@ public final class XPackRestTestConstants { RESULTS_INDEX_PREFIX, CONFIG_INDEX)); - // Data Frame constants: - public static final String DATA_FRAME_INTERNAL_INDEX = ".data-frame-internal-2"; - public static final String DATA_FRAME_NOTIFICATIONS_INDEX = ".data-frame-notifications-1"; - - public static final List DATA_FRAME_TEMPLATES = - Collections.unmodifiableList(Arrays.asList(DATA_FRAME_INTERNAL_INDEX, DATA_FRAME_NOTIFICATIONS_INDEX)); + // Transform constants: + public static final String TRANSFORM_INTERNAL_INDEX_PREFIX = ".transform-internal-"; + public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX = ".transform-notifications-"; + public static final String TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED = ".data-frame-internal-"; + public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED = ".data-frame-notifications-"; private XPackRestTestConstants() { } diff --git a/x-pack/snapshot-tool/src/test/java/org/elasticsearch/snapshots/S3CleanupTests.java b/x-pack/snapshot-tool/src/test/java/org/elasticsearch/snapshots/S3CleanupTests.java index 5a80103f0b5..c33a5993248 100644 --- a/x-pack/snapshot-tool/src/test/java/org/elasticsearch/snapshots/S3CleanupTests.java +++ b/x-pack/snapshot-tool/src/test/java/org/elasticsearch/snapshots/S3CleanupTests.java @@ -49,13 +49,8 @@ public class S3CleanupTests extends ESSingleNodeTestCase { createRepository("test-repo"); repository = (BlobStoreRepository) getInstanceFromNode(RepositoriesService.class).repository("test-repo"); final PlainActionFuture future = 
PlainActionFuture.newFuture(); - repository.threadPool().generic().execute(new ActionRunnable(future) { - @Override - protected void doRun() throws Exception { - repository.blobStore().blobContainer(repository.basePath()).delete(); - future.onResponse(null); - } - }); + repository.threadPool().generic().execute(ActionRunnable.run(future, + () -> repository.blobStore().blobContainer(repository.basePath()).delete())); future.actionGet(); assertBusy(() -> BlobStoreTestUtil.assertBlobsByPrefix(repository, repository.basePath(), "", Collections.emptyMap()), 10L, TimeUnit.MINUTES);
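Pulling the SQL-side changes together: HISTOGRAM(date, INTERVAL 1 YEAR) now translates to a calendar_interval of "1y" in the composite date_histogram source, while every other interval remains a millisecond-based fixed_interval, which is exactly what the updated QueryTranslatorTests expectations assert. A compilable sketch of the builder-side branching, mirroring GroupByDateHistogram#createSourceBuilder (the class and method names here are illustrative, not part of the patch):

import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

import java.time.ZoneId;

final class DateHistogramSourceSketch {

    // A non-null calendarInterval wins; otherwise fall back to the fixed interval in millis.
    static DateHistogramValuesSourceBuilder source(String id, String calendarInterval,
                                                   long fixedIntervalMillis, ZoneId zoneId) {
        DateHistogramValuesSourceBuilder builder = new DateHistogramValuesSourceBuilder(id).timeZone(zoneId);
        return calendarInterval != null
                ? builder.calendarInterval(new DateHistogramInterval(calendarInterval))         // e.g. "1y"
                : builder.fixedInterval(new DateHistogramInterval(fixedIntervalMillis + "ms")); // e.g. "86400000ms"
    }
}

With the year branch this serializes as "calendar_interval":"1y" in the composite source; the one-day case serializes as "fixed_interval":"86400000ms", matching the 86400000L value the tests read back through fixedInterval().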