From 7bc86f23ec394438b5f391315b1f4e116eac2063 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 23 Sep 2019 10:46:25 +0100 Subject: [PATCH 01/94] Wait longer for leader failure in logs test (#46958) `testLogsWarningPeriodicallyIfClusterNotFormed` simulates a leader failure and waits for long enough that a failing leader check is scheduled. However it does not wait for the failing check to actually fail, which requires another two actions and therefore might take up to 200ms more. Unlucky timing would result in this test failing, for instance: ./gradle ':server:test' \ --tests "org.elasticsearch.cluster.coordination.CoordinatorTests.testLogsWarningPeriodicallyIfClusterNotFormed" \ -Dtests.jvm.argline="-Dhppc.bitmixer=DETERMINISTIC" \ -Dtests.seed=F18CDD0EBEB5653:E9BC1A8B062E697A This commit adds the extra delay needed for the leader failure to complete as expected. Fixes #46920 --- .../cluster/coordination/CoordinatorTests.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 9a0238960b5..f968f6f4742 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -1216,9 +1216,15 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase { clusterNode.disconnect(); } - cluster.runFor(defaultMillis(LEADER_CHECK_INTERVAL_SETTING) + defaultMillis(LEADER_CHECK_TIMEOUT_SETTING), + cluster.runFor(defaultMillis(LEADER_CHECK_TIMEOUT_SETTING) // to wait for any in-flight check to time out + + defaultMillis(LEADER_CHECK_INTERVAL_SETTING) // to wait for the next check to be sent + + 2 * DEFAULT_DELAY_VARIABILITY, // to send the failing check and receive the disconnection response "waiting for leader failure"); + for (final ClusterNode clusterNode : cluster.clusterNodes) { + assertThat(clusterNode.getId() + " is CANDIDATE", clusterNode.coordinator.getMode(), is(CANDIDATE)); + } + for (int i = scaledRandomIntBetween(1, 10); i >= 0; i--) { final MockLogAppender mockLogAppender = new MockLogAppender(); try { From ef0b75765b0820baa0afa4cd5a4e0117a49afce9 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 28 May 2019 17:52:35 -0700 Subject: [PATCH 02/94] Add explicit build flag for experimenting with test execution cacheability (#42649) * Add build flag for ignoring random test seed as task input * Fix checkstyle violations --- .../elasticsearch/gradle/BuildPlugin.groovy | 21 ++------ ...emPropertyCommandLineArgumentProvider.java | 30 +++++++++++ .../testfixtures/TestFixturesPlugin.java | 53 +++++++++++-------- 3 files changed, 63 insertions(+), 41 deletions(-) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/SystemPropertyCommandLineArgumentProvider.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 9afbd436400..595bd173730 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -843,7 +843,7 @@ class BuildPlugin implements Plugin { } test.jvmArgumentProviders.add(nonInputProperties) - test.extensions.getByType(ExtraPropertiesExtension).set('nonInputProperties', nonInputProperties) + test.extensions.add('nonInputProperties', nonInputProperties) 
test.executable = "${ext.get('runtimeJavaHome')}/bin/java" test.workingDir = project.file("${project.buildDir}/testrun/${test.name}") @@ -865,7 +865,8 @@ class BuildPlugin implements Plugin { } // we use './temp' since this is per JVM and tests are forbidden from writing to CWD - test.systemProperties 'java.io.tmpdir': './temp', + test.systemProperties 'gradle.dist.lib': new File(project.class.location.toURI()).parent, + 'java.io.tmpdir': './temp', 'java.awt.headless': 'true', 'tests.gradle': 'true', 'tests.artifact': project.name, @@ -881,7 +882,6 @@ class BuildPlugin implements Plugin { } // don't track these as inputs since they contain absolute paths and break cache relocatability - nonInputProperties.systemProperty('gradle.dist.lib', new File(project.class.location.toURI()).parent) nonInputProperties.systemProperty('gradle.worker.jar', "${project.gradle.getGradleUserHomeDir()}/caches/${project.gradle.gradleVersion}/workerMain/gradle-worker.jar") nonInputProperties.systemProperty('gradle.user.home', project.gradle.getGradleUserHomeDir()) @@ -1007,19 +1007,4 @@ class BuildPlugin implements Plugin { }) } } - - private static class SystemPropertyCommandLineArgumentProvider implements CommandLineArgumentProvider { - private final Map systemProperties = [:] - - void systemProperty(String key, Object value) { - systemProperties.put(key, value) - } - - @Override - Iterable asArguments() { - return systemProperties.collect { key, value -> - "-D${key}=${value.toString()}".toString() - } - } - } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/SystemPropertyCommandLineArgumentProvider.java b/buildSrc/src/main/java/org/elasticsearch/gradle/SystemPropertyCommandLineArgumentProvider.java new file mode 100644 index 00000000000..7e808724035 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/SystemPropertyCommandLineArgumentProvider.java @@ -0,0 +1,30 @@ +package org.elasticsearch.gradle; + +import org.gradle.api.tasks.Input; +import org.gradle.process.CommandLineArgumentProvider; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Collectors; + +public class SystemPropertyCommandLineArgumentProvider implements CommandLineArgumentProvider { + private final Map systemProperties = new LinkedHashMap<>(); + + public void systemProperty(String key, Object value) { + systemProperties.put(key, value); + } + + @Override + public Iterable asArguments() { + return systemProperties.entrySet() + .stream() + .map(entry -> "-D" + entry.getKey() + "=" + entry.getValue()) + .collect(Collectors.toList()); + } + + // Track system property keys as an input so our build cache key will change if we add properties but values are still ignored + @Input + public Iterable getPropertyNames() { + return systemProperties.keySet(); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 81b431772c2..556e938875e 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -22,7 +22,9 @@ import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; import com.avast.gradle.dockercompose.tasks.ComposeUp; import org.elasticsearch.gradle.OS; +import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider; import 
org.elasticsearch.gradle.precommit.TestingConventionsTasks; +import org.gradle.api.Action; import org.gradle.api.DefaultTask; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -142,7 +144,8 @@ public class TestFixturesPlugin implements Plugin { configureServiceInfoForTask( task, fixtureProject, - task::systemProperty + (name, host) -> + task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host) ); task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture")); }) @@ -165,28 +168,32 @@ public class TestFixturesPlugin implements Plugin { private void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer consumer) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst - task.doFirst(theTask -> - fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos() - .forEach((service, infos) -> { - infos.getTcpPorts() - .forEach((container, host) -> { - String name = "test.fixtures." + service + ".tcp." + container; - theTask.getLogger().info("port mapping property: {}={}", name, host); - consumer.accept( - name, - host - ); - }); - infos.getUdpPorts() - .forEach((container, host) -> { - String name = "test.fixtures." + service + ".udp." + container; - theTask.getLogger().info("port mapping property: {}={}", name, host); - consumer.accept( - name, - host - ); - }); - }) + task.doFirst(new Action() { + @Override + public void execute(Task theTask) { + fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos() + .forEach((service, infos) -> { + infos.getTcpPorts() + .forEach((container, host) -> { + String name = "test.fixtures." + service + ".tcp." + container; + theTask.getLogger().info("port mapping property: {}={}", name, host); + consumer.accept( + name, + host + ); + }); + infos.getUdpPorts() + .forEach((container, host) -> { + String name = "test.fixtures." + service + ".udp." + container; + theTask.getLogger().info("port mapping property: {}={}", name, host); + consumer.accept( + name, + host + ); + }); + }); + } + } ); } From 5fd7505efc4d5e69804b49cf945e1eace9ae5e44 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Mon, 23 Sep 2019 12:48:47 +0300 Subject: [PATCH 03/94] Testfixtures allow a single service only (#46780) This PR adds some restrictions around testfixtures to make sure the same service (as defined in docker-compose.yml) is not shared between multiple projects. Sharing would break running with --parallel. Projects can still share fixtures as long as each has its own service within. This is still useful to share some of the setup and configuration code of the fixture. Projects now also have to specify a service name when calling useFixture to refer to a specific service. If this is not the case, all services will be claimed and the fixture can't be shared. For this reason fixtures have to explicitly specify if they are using themselves (fixture and tests in the same project). 
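The exclusive-claim rule described above amounts to a registry keyed by fixture project path plus service name; the extension in the diff below also records bare-path claims for `useFixture()` calls without a service name. The following is a minimal, self-contained Java sketch of that idea only — the class and method names are illustrative and are not the plugin's API:

import java.util.HashMap;
import java.util.Map;

public class ServiceClaims {
    // Maps "fixtureProjectPath::serviceName" to the path of the project that claimed it.
    private final Map<String, String> claims = new HashMap<>();

    public void claim(String fixturePath, String serviceName, String claimingProject) {
        String key = fixturePath + "::" + serviceName;
        String previous = claims.putIfAbsent(key, claimingProject);
        if (previous != null && previous.equals(claimingProject) == false) {
            throw new IllegalStateException("Projects " + previous + " and " + claimingProject
                + " both claim the " + serviceName + " service of " + fixturePath
                + ". This is not supported because it breaks running with --parallel.");
        }
    }

    public static void main(String[] args) {
        ServiceClaims claims = new ServiceClaims();
        claims.claim(":test:fixtures:krb5kdc-fixture", "hdfs", ":plugins:repository-hdfs");
        claims.claim(":test:fixtures:krb5kdc-fixture", "peppa", ":x-pack:qa:kerberos-tests"); // fine: a different service
        try {
            claims.claim(":test:fixtures:krb5kdc-fixture", "hdfs", ":example:other-project"); // hypothetical project path
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // rejected: the hdfs service is already claimed
        }
    }
}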
--- .../testfixtures/TestFixtureExtension.java | 59 +++++++++++++++++++ .../testfixtures/TestFixturesPlugin.java | 19 ++++-- distribution/docker/build.gradle | 2 + plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-s3/build.gradle | 3 + x-pack/qa/kerberos-tests/build.gradle | 3 +- x-pack/qa/oidc-op-tests/build.gradle | 2 +- x-pack/qa/openldap-tests/build.gradle | 2 +- .../third-party/active-directory/build.gradle | 2 +- 9 files changed, 83 insertions(+), 11 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java index b4ddcf0bed1..1521b797133 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java @@ -18,20 +18,65 @@ */ package org.elasticsearch.gradle.testfixtures; +import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + public class TestFixtureExtension { private final Project project; final NamedDomainObjectContainer fixtures; + final Map serviceToProjectUseMap = new HashMap<>(); public TestFixtureExtension(Project project) { this.project = project; this.fixtures = project.container(Project.class); } + public void useFixture() { + useFixture(this.project.getPath()); + } + public void useFixture(String path) { + addFixtureProject(path); + serviceToProjectUseMap.put(path, this.project.getPath()); + } + + public void useFixture(String path, String serviceName) { + addFixtureProject(path); + String key = getServiceNameKey(path, serviceName); + serviceToProjectUseMap.put(key, this.project.getPath()); + + Optional otherProject = this.findOtherProjectUsingService(key); + if (otherProject.isPresent()) { + throw new GradleException( + "Projects " + otherProject.get() + " and " + this.project.getPath() + " both claim the "+ serviceName + + " service defined in the docker-compose.yml of " + path + "This is not supported because it breaks " + + "running in parallel. Configure dedicated services for each project and use those instead." 
+ ); + } + } + + private String getServiceNameKey(String fixtureProjectPath, String serviceName) { + return fixtureProjectPath + "::" + serviceName; + } + + private Optional findOtherProjectUsingService(String serviceName) { + return this.project.getRootProject().getAllprojects().stream() + .filter(p -> p.equals(this.project) == false) + .filter(p -> p.getExtensions().findByType(TestFixtureExtension.class) != null) + .map(project -> project.getExtensions().getByType(TestFixtureExtension.class)) + .flatMap(ext -> ext.serviceToProjectUseMap.entrySet().stream()) + .filter(entry -> entry.getKey().equals(serviceName)) + .map(Map.Entry::getValue) + .findAny(); + } + + private void addFixtureProject(String path) { Project fixtureProject = this.project.findProject(path); if (fixtureProject == null) { throw new IllegalArgumentException("Could not find test fixture " + fixtureProject); @@ -42,6 +87,20 @@ public class TestFixtureExtension { ); } fixtures.add(fixtureProject); + // Check for exclusive access + Optional otherProject = this.findOtherProjectUsingService(path); + if (otherProject.isPresent()) { + throw new GradleException("Projects " + otherProject.get() + " and " + this.project.getPath() + " both " + + "claim all services from " + path + ". This is not supported because it breaks running in parallel. " + + "Configure specific services in docker-compose.yml for each and add the service name to `useFixture`" + ); + } } + boolean isServiceRequired(String serviceName, String fixtureProject) { + if (serviceToProjectUseMap.containsKey(fixtureProject)) { + return true; + } + return serviceToProjectUseMap.containsKey(getServiceNameKey(fixtureProject, serviceName)); + } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 556e938875e..93c91cbee51 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.testfixtures; import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; +import com.avast.gradle.dockercompose.ServiceInfo; import com.avast.gradle.dockercompose.tasks.ComposeUp; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider; @@ -58,9 +59,6 @@ public class TestFixturesPlugin implements Plugin { ext.set("testFixturesDir", testfixturesDir); if (project.file(DOCKER_COMPOSE_YML).exists()) { - // the project that defined a test fixture can also use it - extension.fixtures.add(project); - Task buildFixture = project.getTasks().create("buildFixture"); Task pullFixture = project.getTasks().create("pullFixture"); Task preProcessFixture = project.getTasks().create("preProcessFixture"); @@ -106,6 +104,7 @@ public class TestFixturesPlugin implements Plugin { configureServiceInfoForTask( postProcessFixture, project, + false, (name, port) -> postProcessFixture.getExtensions() .getByType(ExtraPropertiesExtension.class).set(name, port) ); @@ -144,6 +143,7 @@ public class TestFixturesPlugin implements Plugin { configureServiceInfoForTask( task, fixtureProject, + true, (name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host) ); @@ -165,14 +165,23 @@ public class TestFixturesPlugin implements Plugin { ); } - private 
void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer consumer) { + private void configureServiceInfoForTask( + Task task, Project fixtureProject, boolean enableFilter, BiConsumer consumer + ) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst + TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class); task.doFirst(new Action() { @Override public void execute(Task theTask) { fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos() - .forEach((service, infos) -> { + .entrySet().stream() + .filter(entry -> enableFilter == false || + extension.isServiceRequired(entry.getKey(), fixtureProject.getPath()) + ) + .forEach(entry -> { + String service = entry.getKey(); + ServiceInfo infos = entry.getValue(); infos.getTcpPorts() .forEach((container, host) -> { String name = "test.fixtures." + service + ".tcp." + container; diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 7bf973e7edc..e4f0a04d4e9 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -6,6 +6,8 @@ import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +testFixtures.useFixture() + configurations { dockerPlugins dockerSource diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 43b58ea7f39..7d849856aa8 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -37,7 +37,7 @@ versions << [ 'hadoop2': '2.8.1' ] -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" +testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs" configurations { hdfsFixture diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 99eb86a4e00..ab4597cf7f4 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -146,6 +146,9 @@ task thirdPartyTest(type: Test) { if (useFixture) { apply plugin: 'elasticsearch.test.fixtures' + + testFixtures.useFixture() + task writeDockerFile { File minioDockerfile = new File("${project.buildDir}/minio-docker/Dockerfile") outputs.file(minioDockerfile) diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 81e5d746cc7..3b6530a69d8 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -1,13 +1,12 @@ import java.nio.file.Path import java.nio.file.Paths -import java.nio.file.Files apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test.fixtures' -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" +testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "peppa" dependencies { testCompile project(':x-pack:plugin:core') diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 9328447597e..13f2ef4927d 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -10,7 +10,7 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } -testFixtures.useFixture ":x-pack:test:idp-fixture" +testFixtures.useFixture ":x-pack:test:idp-fixture", 
"oidc-provider" String ephemeralPort; task setupPorts { diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 9fc5a9b3b31..805023b5413 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -7,7 +7,7 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } -testFixtures.useFixture ":x-pack:test:idp-fixture" +testFixtures.useFixture ":x-pack:test:idp-fixture", "openldap" Project idpFixtureProject = xpackProject("test:idp-fixture") String outputDir = "${project.buildDir}/generated-resources/${project.name}" diff --git a/x-pack/qa/third-party/active-directory/build.gradle b/x-pack/qa/third-party/active-directory/build.gradle index 2d4af2b46bb..b76b25b08ea 100644 --- a/x-pack/qa/third-party/active-directory/build.gradle +++ b/x-pack/qa/third-party/active-directory/build.gradle @@ -15,7 +15,7 @@ processTestResources { compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" -// we have to repeat these patterns because the security test resources are effectively in the src of this project +// we have to repeat these patterns because the security test resources are effectively in the src of this p forbiddenPatterns { exclude '**/*.key' exclude '**/*.p12' From f06aa0c6c00d31578c44f0bdc122b132e76387df Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Mon, 23 Sep 2019 13:31:41 +0200 Subject: [PATCH 04/94] Fix G1 GC default IHOP (#46169) G1 GC were setup to use an `InitiatingHeapOccupancyPercent` of 75. This could leave used memory at a very high level for an extended duration, triggering the real memory circuit breaker even at low activity levels. The value is a threshold for old generation usage relative to total heap size and thus it should leave room for the new generation. Default in G1 is to allow up to 60 percent for new generation and this could mean that the threshold was effectively at 135% heap usage. GC would still kick in of course and eventually enough mixed collections would take place such that adaptive adjustment of IHOP kicks in. The JVM has adaptive setting of the IHOP, but this does not kick in until it has sampled a few collections. A newly started, relatively quiet server with primarily new generation activity could thus experience heap above 95% frequently for a duration. The changes here are two-fold: 1. Use 30% default for IHOP (the JVM default of 45 could still mean 105% heap usage threshold and did not fully ensure not to hit the circuit breaker with low activity) 2. Set G1ReservePercent=25. This is used by the adaptive IHOP mechanism, meaning old/mixed GC should kick in no later than at 75% heap. This ensures IHOP stays compatible with the real memory circuit breaker also after being adjusted by adaptive IHOP. 
--- distribution/src/config/jvm.options | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index 699664cb254..075106100ea 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -43,7 +43,8 @@ # 10-:-XX:-UseConcMarkSweepGC # 10-:-XX:-UseCMSInitiatingOccupancyOnly # 10-:-XX:+UseG1GC -# 10-:-XX:InitiatingHeapOccupancyPercent=75 +# 10-:-XX:G1ReservePercent=25 +# 10-:-XX:InitiatingHeapOccupancyPercent=30 ## DNS cache policy # cache ttl in seconds for positive DNS lookups noting that this overrides the From 2da040601be092f3817317ad4cbe4a131e3f29db Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 23 Sep 2019 15:01:47 +0200 Subject: [PATCH 05/94] Fix Bug in Snapshot Status Response Timestamps (#46919) (#46970) Fixing a corner case where snapshot total time calculation was off when getting the `SnapshotStatus` of an in-progress snapshot. Closes #46913 --- .../admin/cluster/snapshots/status/SnapshotStats.java | 4 ++++ .../admin/cluster/snapshots/status/SnapshotStatus.java | 1 + .../snapshots/status/TransportSnapshotsStatusAction.java | 7 ++++++- .../admin/cluster/snapshots/status/SnapshotStatsTests.java | 5 +++-- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java index 16410eefbf0..c242d01ed74 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -70,6 +71,7 @@ public class SnapshotStats implements Writeable, ToXContentObject { long incrementalSize, long totalSize, long processedSize) { this.startTime = startTime; this.time = time; + assert time >= 0 : "Tried to initialize snapshot stats with negative total time [" + time + "]"; this.incrementalFileCount = incrementalFileCount; this.totalFileCount = totalFileCount; this.processedFileCount = processedFileCount; @@ -323,6 +325,8 @@ public class SnapshotStats implements Writeable, ToXContentObject { // Update duration time = endTime - startTime; } + assert time >= 0 + : "Update with [" + Strings.toString(stats) + "][" + updateTimestamps + "] resulted in negative total time [" + time + "]"; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index 293a14d731b..9e4318bf1e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -103,6 +103,7 @@ public class SnapshotStatus implements ToXContentObject, Writeable { this.shards = Objects.requireNonNull(shards); this.includeGlobalState = includeGlobalState; shardsStats = new SnapshotShardsStats(shards); + assert time >= 0 : "time must be >= 0 but received [" + time + "]"; 
updateShardStats(startTime, time); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 063f051b136..1d0c3ed4d8c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -238,9 +238,14 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction= startTime || (endTime == 0L && snapshotInfo.state().completed() == false) + : "Inconsistent timestamps found in SnapshotInfo [" + snapshotInfo + "]"; builder.add(new SnapshotStatus(new Snapshot(repositoryName, snapshotId), state, Collections.unmodifiableList(shardStatusBuilder), snapshotInfo.includeGlobalState(), - startTime, snapshotInfo.endTime() - startTime)); + startTime, + // Use current time to calculate overall runtime for in-progress snapshots that have endTime == 0 + (endTime == 0 ? threadPool.absoluteTimeInMillis() : endTime) - startTime)); } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java index 2822a9661fd..76f35bcdcc3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java @@ -28,8 +28,9 @@ public class SnapshotStatsTests extends AbstractXContentTestCase @Override protected SnapshotStats createTestInstance() { - long startTime = randomNonNegativeLong(); - long time = randomNonNegativeLong(); + // Using less than half of Long.MAX_VALUE for random time values to avoid long overflow in tests that add the two time values + long startTime = randomLongBetween(0, Long.MAX_VALUE / 2 - 1); + long time = randomLongBetween(0, Long.MAX_VALUE / 2 - 1); int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE); int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE); int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE); From b09aba4c55eeb2a3a3c8cad1705d4d98b84f2ef1 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Mon, 23 Sep 2019 09:18:01 -0400 Subject: [PATCH 06/94] [DOCS] Reformat rollover index API docs (#46778) --- docs/reference/indices/create-index.asciidoc | 2 + .../reference/indices/rollover-index.asciidoc | 237 +++++++++++++----- 2 files changed, 174 insertions(+), 65 deletions(-) diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index fef6ff96a52..afb7ab91232 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -35,6 +35,7 @@ creating an index, you can specify the following: -- (Optional, string) Name of the index you wish to create. 
+// tag::index-name-reqs[] Index names must meet the following criteria: - Lowercase only @@ -43,6 +44,7 @@ Index names must meet the following criteria: - Cannot start with `-`, `_`, `+` - Cannot be `.` or `..` - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster) +// end::index-name-reqs[] -- diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 8372de55024..294c38790e1 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -1,5 +1,37 @@ [[indices-rollover-index]] -=== Rollover Index +=== Rollover index API +++++ +Rollover index +++++ + +Assigns an <> to a new index +when the alias's existing index meets a condition you provide. + +[source,console] +---- +POST /alias1/_rollover/twitter +{ + "conditions": { + "max_age": "7d", + "max_docs": 1000, + "max_size": "5gb" + } +} +---- +// TEST[s/^/PUT my_old_index_name\nPUT my_old_index_name\/_alias\/alias1\n/] + + +[[rollover-index-api-request]] +==== {api-request-title} + + +`POST //_rollover/` + +`POST //_rollover/` + + +[[rollover-index-api-desc]] +==== {api-description-title} The rollover index API rolls an <> to a new index when the existing index meets a condition you provide. You can use this API to retire @@ -24,17 +56,102 @@ from the original (rolled-over) index. In this scenario, the write index will have its rollover alias' `is_write_index` set to `false`, while the newly created index will now have the rollover alias pointing to it as the write index with `is_write_index` as `true`. -The available conditions are: -[[index-rollover-conditions]] -.`conditions` parameters -[options="header"] -|=== -| Name | Description -| max_age | The maximum age of the index -| max_docs | The maximum number of documents the index should contain. This does not add documents multiple times for replicas -| max_size | The maximum estimated size of the primary shard of the index -|=== +[[rollover-wait-active-shards]] +===== Wait for active shards + +Because the rollover operation creates a new index to rollover to, the +<> setting on +index creation applies to the rollover action. + + +[[rollover-index-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of the existing index alias +to assign to the target index. + + +``:: ++ +-- +(Optional*, string) +Name of the target index to create and assign the index alias. + +include::{docdir}/indices/create-index.asciidoc[tag=index-name-reqs] + +*This parameter is not required +if the alias is assigned to an index name that ends with `-` and a number, +such as `logs-000001`. +In this case, +the name of the new index follows the same pattern, +incrementing the number. +For example, +`logs-000001` increments to `logs-000002`. +This number is zero-padded with a length of 6, +regardless of the prior index name. + +If the existing index for the alias does not match this pattern, +this parameter is required. +-- + + +[[rollover-index-api-query-params]] +==== {api-query-parms-title} + +`dry_run`:: +(Optional, boolean) +If `true`, +the request checks whether the index matches provided conditions +but does not perform a rollover. +Defaults to `false`. 
+ +include::{docdir}/rest-api/common-parms.asciidoc[tag=include-type-name] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms] + + +[[rollover-index-api-request-body]] +==== {api-request-body-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=aliases] + +`conditions`:: ++ +-- +(Required, object) +Set of conditions the index alias's existing index must met to roll over. + +Parameters include: + +`max_age`:: +(Optional, <>) +Maximum age of the index. + +`max_docs`:: +(Optional, integer) +Maximum number of documents in the index. +This number does *not* include documents in replica shards. + +`max_size`:: +(Optional, <>) +Maximum estimated size of the primary shard of the index. +-- + +include::{docdir}/rest-api/common-parms.asciidoc[tag=mappings] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=settings] + + +[[rollover-index-api-example]] +==== {api-examples-title} + +[[rollover-index-basic-ex]] +===== Basic example [source,console] -------------------------------------------------- @@ -63,7 +180,7 @@ POST /logs_write/_rollover <2> contains 1,000 or more documents, or has an index size at least around 5GB, then the `logs-000002` index is created and the `logs_write` alias is updated to point to `logs-000002`. -The above request might return the following response: +The API returns the following response: [source,console-result] -------------------------------------------------- @@ -86,8 +203,41 @@ The above request might return the following response: <2> Whether the rollover was dry run. <3> The result of each condition. -[float] -==== Naming the new index +[[rollover-index-settings-ex]] +===== Specify settings for the target index + +The settings, mappings, and aliases for the new index are taken from any +matching <>. Additionally, you can specify +`settings`, `mappings`, and `aliases` in the body of the request, just like the +<> API. Values specified in the request +override any values set in matching index templates. For example, the following +`rollover` request overrides the `index.number_of_shards` setting: + +[source,console] +-------------------------------------------------- +PUT /logs-000001 +{ + "aliases": { + "logs_write": {} + } +} + +POST /logs_write/_rollover +{ + "conditions" : { + "max_age": "7d", + "max_docs": 1000, + "max_size": "5gb" + }, + "settings": { + "index.number_of_shards": 2 + } +} +-------------------------------------------------- + + +[[rollover-index-specify-index-ex]] +===== Specify a target index name If the name of the existing index ends with `-` and a number -- e.g. `logs-000001` -- then the name of the new index will follow the same pattern, @@ -110,8 +260,9 @@ POST /my_alias/_rollover/my_new_index_name -------------------------------------------------- // TEST[s/^/PUT my_old_index_name\nPUT my_old_index_name\/_alias\/my_alias\n/] -[float] -==== Using date math with the rollover API + +[[_using_date_math_with_the_rollover_api]] +===== Use date math with a rollover It can be useful to use <> to name the rollover index according to the date that the index rolled over, e.g. @@ -187,53 +338,15 @@ GET /%3Clogs-%7Bnow%2Fd%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-1d%7D-*%3E%2C%3Clogs-%7Bnow // TEST[continued] // TEST[s/now/2016.10.31||/] -[float] -==== Defining the new index -The settings, mappings, and aliases for the new index are taken from any -matching <>. 
Additionally, you can specify -`settings`, `mappings`, and `aliases` in the body of the request, just like the -<> API. Values specified in the request -override any values set in matching index templates. For example, the following -`rollover` request overrides the `index.number_of_shards` setting: - -[source,console] --------------------------------------------------- -PUT /logs-000001 -{ - "aliases": { - "logs_write": {} - } -} - -POST /logs_write/_rollover -{ - "conditions" : { - "max_age": "7d", - "max_docs": 1000, - "max_size": "5gb" - }, - "settings": { - "index.number_of_shards": 2 - } -} --------------------------------------------------- - -[float] -==== Dry run +[[rollover-index-api-dry-run-ex]] +===== Dry run The rollover API supports `dry_run` mode, where request conditions can be -checked without performing the actual rollover: +checked without performing the actual rollover. [source,console] -------------------------------------------------- -PUT /logs-000001 -{ - "aliases": { - "logs_write": {} - } -} - POST /logs_write/_rollover?dry_run { "conditions" : { @@ -243,17 +356,11 @@ POST /logs_write/_rollover?dry_run } } -------------------------------------------------- +// TEST[s/^/PUT logs-000001\nPUT logs-000001\/_alias\/logs_write\n/] -[float] -==== Wait For Active Shards - -Because the rollover operation creates a new index to rollover to, the -<> setting on -index creation applies to the rollover action as well. [[indices-rollover-is-write-index]] -[float] -==== Write Index Alias Behavior +===== Roll over a write index The rollover alias when rolling over a write index that has `is_write_index` explicitly set to `true` is not swapped during rollover actions. Since having an alias point to multiple indices is ambiguous in distinguishing From d4d1182677ce9c2715b4064fed5c34b73a8a5021 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 23 Sep 2019 17:00:37 +0200 Subject: [PATCH 07/94] update _common.json format (#46872) API specs now use an object for the documentation field. _common was not updated yet. This commit updates _common.json and its corresponding parser. 
Closes #46744 Co-Authored-By: Tomas Della Vedova --- .../resources/rest-api-spec/api/_common.json | 6 +- .../restspec/ClientYamlSuiteRestSpec.java | 66 +++++++++++-------- .../restspec/ClientYamlSuiteRestApiTests.java | 45 +++++++++++++ 3 files changed, 87 insertions(+), 30 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json index 69a1f8fb8ce..1505db774f0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json @@ -1,6 +1,8 @@ { - "description": "Parameters that are accepted by all API endpoints.", - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html", + "documentation" : { + "description": "Parameters that are accepted by all API endpoints.", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html" + }, "params": { "pretty": { "type": "boolean", diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index 70665ad5d9b..f0d1b13d98d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -18,6 +18,12 @@ */ package org.elasticsearch.test.rest.yaml.restspec; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; + import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; @@ -30,12 +36,6 @@ import java.util.Map; import java.util.Set; import java.util.stream.Stream; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; - /** * Holds the specification used to turn {@code do} actions in the YAML suite into REST api calls. 
*/ @@ -43,7 +43,7 @@ public class ClientYamlSuiteRestSpec { private final Set globalParameters = new HashSet<>(); private final Map restApiMap = new HashMap<>(); - private ClientYamlSuiteRestSpec() {} + ClientYamlSuiteRestSpec() {} private void addApi(ClientYamlSuiteRestApi restApi) { ClientYamlSuiteRestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi); @@ -99,27 +99,7 @@ public class ClientYamlSuiteRestSpec { JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { String filename = jsonFile.getFileName().toString(); if (filename.equals("_common.json")) { - String currentFieldName = null; - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && "params".equals(currentFieldName)) { - while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - String param = parser.currentName(); - if (restSpec.globalParameters.contains(param)) { - throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); - } - restSpec.globalParameters.add(param); - parser.nextToken(); - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected params field in rest api definition to " + - "contain an object"); - } - parser.skipChildren(); - } - } - } + parseCommonSpec(parser, restSpec); } else { ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); @@ -134,4 +114,34 @@ public class ClientYamlSuiteRestSpec { throw new UncheckedIOException("Can't parse rest spec file: [" + jsonFile + "]", ex); } } + + static void parseCommonSpec(XContentParser parser, ClientYamlSuiteRestSpec restSpec) throws IOException { + String currentFieldName = null; + parser.nextToken(); + assert parser.currentToken() == XContentParser.Token.START_OBJECT; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if ("params".equals(currentFieldName)) { + while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + String param = parser.currentName(); + if (restSpec.globalParameters.contains(param)) { + throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); + } + restSpec.globalParameters.add(param); + parser.nextToken(); + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Expected params field in rest api definition to " + + "contain an object"); + } + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + } + + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java index a3c6544137a..e2b9a4cddb4 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java @@ -29,6 +29,18 @@ import java.util.List; public class ClientYamlSuiteRestApiTests extends ESTestCase { + public void 
testParseCommonSpec() throws IOException { + XContentParser parser = createParser(YamlXContent.yamlXContent, COMMON_SPEC); + ClientYamlSuiteRestSpec restSpec = new ClientYamlSuiteRestSpec(); + ClientYamlSuiteRestSpec.parseCommonSpec(parser, restSpec); + assertTrue(restSpec.isGlobalParameter("pretty")); + assertTrue(restSpec.isGlobalParameter("human")); + assertTrue(restSpec.isGlobalParameter("error_trace")); + assertTrue(restSpec.isGlobalParameter("source")); + assertTrue(restSpec.isGlobalParameter("filter_path")); + assertFalse(restSpec.isGlobalParameter("unknown")); + } + public void testPathMatching() throws IOException { XContentParser parser = createParser(YamlXContent.yamlXContent, REST_SPEC_API); ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("index.json", parser); @@ -66,6 +78,39 @@ public class ClientYamlSuiteRestApiTests extends ESTestCase { } } + private static final String COMMON_SPEC = "{\n"+ + " \"documentation\" : {\n"+ + " \"url\": \"Parameters that are accepted by all API endpoints.\",\n"+ + " \"documentation\": \"https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html\"\n"+ + " },\n"+ + " \"params\": {\n"+ + " \"pretty\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Pretty format the returned JSON response.\",\n"+ + " \"default\": false\n"+ + " },\n"+ + " \"human\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Return human readable values for statistics.\",\n"+ + " \"default\": true\n"+ + " },\n"+ + " \"error_trace\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Include the stack trace of returned errors.\",\n"+ + " \"default\": false\n"+ + " },\n"+ + " \"source\": {\n"+ + " \"type\": \"string\",\n"+ + " \"description\": \"The URL-encoded request definition." 
+ + " Useful for libraries that do not accept a request body for non-POST requests.\"\n"+ + " },\n"+ + " \"filter_path\": {\n"+ + " \"type\": \"list\",\n"+ + " \"description\": \"A comma-separated list of filters used to reduce the response.\"\n"+ + " }\n"+ + " }\n"+ + "}\n"; + private static final String REST_SPEC_API = "{\n" + " \"index\":{\n" + " \"documentation\":{\n" + From a815f8b930255b03e69dc422163a1086ec869256 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 23 Sep 2019 08:45:01 -0700 Subject: [PATCH 08/94] [DOCS] Group rollup and transform content (#46882) --- docs/reference/data-rollup-transform.asciidoc | 16 ++++++++++++ docs/reference/index.asciidoc | 4 +-- docs/reference/rollup/api-quickref.asciidoc | 11 +++++--- docs/reference/rollup/index.asciidoc | 25 ++++++++----------- docs/reference/rollup/overview.asciidoc | 13 ++++++---- .../rollup/rollup-agg-limitations.asciidoc | 4 +-- .../rollup/rollup-getting-started.asciidoc | 13 ++++++---- .../rollup/rollup-search-limitations.asciidoc | 12 ++++----- .../rollup/understanding-groups.asciidoc | 6 ++--- 9 files changed, 63 insertions(+), 41 deletions(-) create mode 100644 docs/reference/data-rollup-transform.asciidoc diff --git a/docs/reference/data-rollup-transform.asciidoc b/docs/reference/data-rollup-transform.asciidoc new file mode 100644 index 00000000000..5fe08d6f0d5 --- /dev/null +++ b/docs/reference/data-rollup-transform.asciidoc @@ -0,0 +1,16 @@ +[[data-rollup-transform]] += Roll up or transform your data + +[partintro] +-- + +{es} offers the following methods for manipulating your data: + +* <> ++ +include::rollup/index.asciidoc[tag=rollup-intro] +* {stack-ov}/ml-dataframes.html[Transforming your data] + +-- + +include::rollup/index.asciidoc[] diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 8eaac30aa50..e0cbe106681 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -50,10 +50,10 @@ include::sql/index.asciidoc[] include::monitoring/index.asciidoc[] -include::rollup/index.asciidoc[] - include::frozen-indices.asciidoc[] +include::data-rollup-transform.asciidoc[] + include::high-availability.asciidoc[] include::security/index.asciidoc[] diff --git a/docs/reference/rollup/api-quickref.asciidoc b/docs/reference/rollup/api-quickref.asciidoc index d6be3e4e5b6..8a64d9df17f 100644 --- a/docs/reference/rollup/api-quickref.asciidoc +++ b/docs/reference/rollup/api-quickref.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-api-quickref]] -== API Quick Reference +=== {rollup-cap} API quick reference +++++ +API quick reference +++++ experimental[] @@ -15,7 +18,7 @@ Most rollup endpoints have the following base: [float] [[rollup-api-jobs]] -=== /job/ +==== /job/ * {ref}/rollup-put-job.html[PUT /_rollup/job/+++]: Create a {rollup-job} * {ref}/rollup-get-job.html[GET /_rollup/job]: List {rollup-jobs} @@ -26,13 +29,13 @@ Most rollup endpoints have the following base: [float] [[rollup-api-data]] -=== /data/ +==== /data/ * {ref}/rollup-get-rollup-caps.html[GET /_rollup/data//_rollup_caps+++]: Get Rollup Capabilities * {ref}/rollup-get-rollup-index-caps.html[GET //_rollup/data/+++]: Get Rollup Index Capabilities [float] [[rollup-api-index]] -=== // +==== // * {ref}/rollup-search.html[GET //_rollup_search]: Search rollup data diff --git a/docs/reference/rollup/index.asciidoc b/docs/reference/rollup/index.asciidoc index 64dc233f82f..99180e2f32d 100644 --- a/docs/reference/rollup/index.asciidoc +++ b/docs/reference/rollup/index.asciidoc @@ -1,10 +1,7 @@ 
[role="xpack"] [testenv="basic"] [[xpack-rollup]] -= Rolling up historical data - -[partintro] --- +== Rolling up historical data experimental[] @@ -12,20 +9,20 @@ Keeping historical data around for analysis is extremely useful but often avoide archiving massive amounts of data. Retention periods are thus driven by financial realities rather than by the usefulness of extensive historical data. -The Rollup feature in {xpack} provides a means to summarize and store historical data so that it can still be used -for analysis, but at a fraction of the storage cost of raw data. +// tag::rollup-intro[] +The {stack} {rollup-features} provide a means to summarize and store historical +data so that it can still be used for analysis, but at a fraction of the storage +cost of raw data. +// end::rollup-intro[] - -* <> -* <> -* <> -* <> +* <> +* <> +* <> +* <> * <> -* <> +* <> --- - include::overview.asciidoc[] include::api-quickref.asciidoc[] include::rollup-getting-started.asciidoc[] diff --git a/docs/reference/rollup/overview.asciidoc b/docs/reference/rollup/overview.asciidoc index 90c5e20a850..843cd5c0584 100644 --- a/docs/reference/rollup/overview.asciidoc +++ b/docs/reference/rollup/overview.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-overview]] -== Overview +=== {rollup-cap} overview +++++ +Overview +++++ experimental[] @@ -23,7 +26,7 @@ reading often diminishes with time. It's not useless -- it could easily contrib value often leads to deletion rather than paying the fixed storage cost. [float] -=== Rollup store historical data at reduced granularity +==== Rollup stores historical data at reduced granularity That's where Rollup comes into play. The Rollup functionality summarizes old, high-granularity data into a reduced granularity format for long-term storage. By "rolling" the data up into a single summary document, historical data @@ -39,7 +42,7 @@ automates this process of summarizing historical data. Details about setting up and configuring Rollup are covered in <> [float] -=== Rollup uses standard query DSL +==== Rollup uses standard query DSL The Rollup feature exposes a new search endpoint (`/_rollup_search` vs the standard `/_search`) which knows how to search over rolled-up data. Importantly, this endpoint accepts 100% normal {es} Query DSL. Your application does not need to learn @@ -53,7 +56,7 @@ But if your queries, aggregations and dashboards only use the available function data is trivial. [float] -=== Rollup merges "live" and "rolled" data +==== Rollup merges "live" and "rolled" data A useful feature of Rollup is the ability to query both "live", realtime data in addition to historical "rolled" data in a single query. @@ -67,7 +70,7 @@ It will take the results from both data sources and merge them together. If the "rolled" data, live data is preferred to increase accuracy. [float] -=== Rollup is multi-interval aware +==== Rollup is multi-interval aware Finally, Rollup is capable of intelligently utilizing the best interval available. If you've worked with summarizing features of other products, you'll find that they can be limiting. If you configure rollups at daily intervals... 
your diff --git a/docs/reference/rollup/rollup-agg-limitations.asciidoc b/docs/reference/rollup/rollup-agg-limitations.asciidoc index 9f8b6f66ade..6f9f949bf8b 100644 --- a/docs/reference/rollup/rollup-agg-limitations.asciidoc +++ b/docs/reference/rollup/rollup-agg-limitations.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-agg-limitations]] -== Rollup Aggregation Limitations +=== {rollup-cap} aggregation limitations experimental[] @@ -9,7 +9,7 @@ There are some limitations to how fields can be rolled up / aggregated. This pa you are aware of them. [float] -=== Limited aggregation components +==== Limited aggregation components The Rollup functionality allows fields to be grouped with the following aggregations: diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index 27f9d9cd406..3b57e968a9e 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-getting-started]] -== Getting Started +=== Getting started with {rollups} +++++ +Getting started +++++ experimental[] @@ -23,7 +26,7 @@ look like this: // NOTCONSOLE [float] -=== Creating a Rollup Job +==== Creating a rollup job We'd like to rollup these documents into hourly summaries, which will allow us to generate reports and dashboards with any time interval one hour or greater. A rollup job might look like this: @@ -103,7 +106,7 @@ After you execute the above command and create the job, you'll receive the follo ---- [float] -=== Starting the job +==== Starting the job After the job is created, it will be sitting in an inactive state. Jobs need to be started before they begin processing data (this allows you to stop them later as a way to temporarily pause, without deleting the configuration). @@ -117,7 +120,7 @@ POST _rollup/job/sensor/_start // TEST[setup:sensor_rollup_job] [float] -=== Searching the Rolled results +==== Searching the rolled results After the job has run and processed some data, we can use the <> endpoint to do some searching. The Rollup feature is designed so that you can use the same Query DSL syntax that you are accustomed to... it just happens to run on the rolled up data instead. @@ -292,7 +295,7 @@ In addition to being more complicated (date histogram and a terms aggregation, p the date_histogram uses a `7d` interval instead of `60m`. [float] -=== Conclusion +==== Conclusion This quickstart should have provided a concise overview of the core functionality that Rollup exposes. There are more tips and things to consider when setting up Rollups, which you can find throughout the rest of this section. You may also explore the <> diff --git a/docs/reference/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc index d55787f3cec..f6315e12a30 100644 --- a/docs/reference/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-search-limitations]] -== Rollup Search Limitations +=== {rollup-cap} search limitations experimental[] @@ -11,7 +11,7 @@ live data is thrown away, you will always lose some flexibility. This page highlights the major limitations so that you are aware of them. [float] -=== Only one Rollup index per search +==== Only one {rollup} index per search When using the <> endpoint, the `index` parameter accepts one or more indices. 
These can be a mix of regular, non-rollup indices and rollup indices. However, only one rollup index can be specified. The exact list of rules for the `index` parameter are as @@ -33,7 +33,7 @@ may be able to open this up to multiple rollup jobs. [float] [[aggregate-stored-only]] -=== Can only aggregate what's been stored +==== Can only aggregate what's been stored A perhaps obvious limitation, but rollups can only aggregate on data that has been stored in the rollups. If you don't configure the rollup job to store metrics about the `price` field, you won't be able to use the `price` field in any query or aggregation. @@ -81,7 +81,7 @@ The response will tell you that the field and aggregation were not possible, bec // TESTRESPONSE[s/"stack_trace": \.\.\./"stack_trace": $body.$_path/] [float] -=== Interval Granularity +==== Interval granularity Rollups are stored at a certain granularity, as defined by the `date_histogram` group in the configuration. This means you can only search/aggregate the rollup data with an interval that is greater-than or equal to the configured rollup interval. @@ -111,7 +111,7 @@ That said, if multiple jobs are present in a single rollup index with varying in with the largest interval to satisfy the search request. [float] -=== Limited querying components +==== Limited querying components The Rollup functionality allows `query`'s in the search request, but with a limited subset of components. The queries currently allowed are: @@ -128,7 +128,7 @@ If you attempt to use an unsupported query, or the query references a field that thrown. We expect the list of support queries to grow over time as more are implemented. [float] -=== Timezones +==== Timezones Rollup documents are stored in the timezone of the `date_histogram` group configuration in the job. If no timezone is specified, the default is to rollup timestamps in `UTC`. diff --git a/docs/reference/rollup/understanding-groups.asciidoc b/docs/reference/rollup/understanding-groups.asciidoc index a59c19fbf5c..eb1b47e8a16 100644 --- a/docs/reference/rollup/understanding-groups.asciidoc +++ b/docs/reference/rollup/understanding-groups.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-understanding-groups]] -== Understanding Groups +=== Understanding groups experimental[] @@ -121,7 +121,7 @@ Ultimately, when configuring `groups` for a job, think in terms of how you might then include those in the config. Because Rollup Search allows any order or combination of the grouped fields, you just need to decide if a field is useful for aggregating later, and how you might wish to use it (terms, histogram, etc) -=== Grouping Limitations with heterogeneous indices +==== Grouping limitations with heterogeneous indices There was previously a limitation in how Rollup could handle indices that had heterogeneous mappings (multiple, unrelated/non-overlapping mappings). The recommendation at the time was to configure a separate job per data "type". For example, you might configure a separate @@ -192,7 +192,7 @@ PUT _rollup/job/combined -------------------------------------------------- // NOTCONSOLE -=== Doc counts and overlapping jobs +==== Doc counts and overlapping jobs There was previously an issue with document counts on "overlapping" job configurations, driven by the same internal implementation detail. 
If there were two Rollup jobs saving to the same index, where one job is a "subset" of another job, it was possible that document counts From 199fff8a55fc31d23512abaf102a0270538e202d Mon Sep 17 00:00:00 2001 From: Eray Date: Mon, 23 Sep 2019 19:46:39 +0300 Subject: [PATCH 09/94] Allow max_children only in top level nested sort (#46731) This commit restricts the usage of max_children to the top level nested sort since it is ignored on the other levels. --- .../search/sort/FieldSortBuilder.java | 17 +++++++++-- .../search/sort/GeoDistanceSortBuilder.java | 2 ++ .../search/sort/ScriptSortBuilder.java | 2 ++ .../search/sort/FieldSortIT.java | 30 +++++++++++++++++++ 4 files changed, 48 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 4b52aa82a37..a4e5f4c5262 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -409,16 +409,15 @@ public class FieldSortBuilder extends SortBuilder { throw new QueryShardException(context, "max_children is only supported on last level of nested sort"); } - // new nested sorts takes priority + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } else { nested = resolveNested(context, nestedPath, nestedFilter); } } - IndexFieldData fieldData = context.getForField(fieldType); if (fieldData instanceof IndexNumericFieldData == false - && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { + && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); } final SortField field; @@ -437,6 +436,18 @@ public class FieldSortBuilder extends SortBuilder { } } + /** + * Throws an exception if max children is not located at top level nested sort. 
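+ * For example (an illustrative sketch; the field names are hypothetical): a sort on {@code a.b.field} may call {@code setMaxChildren} on the top-level {@code NestedSortBuilder("a")}, but calling it on the inner {@code NestedSortBuilder("a.b")} triggers this exception.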
+ */ + static void validateMaxChildrenExistOnlyInTopLevelNestedSort(QueryShardContext context, NestedSortBuilder nestedSort) { + for (NestedSortBuilder child = nestedSort.getNestedSort(); child != null; child = child.getNestedSort()) { + if (child.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on top level of nested sort"); + } + } + } + @Override public boolean equals(Object other) { if (this == other) { diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 1bc6af2c966..9ec51753dac 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -66,6 +66,7 @@ import java.util.Locale; import java.util.Objects; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +import static org.elasticsearch.search.sort.FieldSortBuilder.validateMaxChildrenExistOnlyInTopLevelNestedSort; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** @@ -630,6 +631,7 @@ public class GeoDistanceSortBuilder extends SortBuilder "max_children is only supported on last level of nested sort"); } // new nested sorts takes priority + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } else { nested = resolveNested(context, nestedPath, nestedFilter); diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 8d5690d8583..4ebb8f2689c 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -60,6 +60,7 @@ import java.util.Locale; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.search.sort.FieldSortBuilder.validateMaxChildrenExistOnlyInTopLevelNestedSort; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** @@ -325,6 +326,7 @@ public class ScriptSortBuilder extends SortBuilder { "max_children is only supported on last level of nested sort"); } // new nested sorts takes priority + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } else { nested = resolveNested(context, nestedPath, nestedFilter); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index d3f21867ab1..ae435d23ed0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -1421,6 +1421,20 @@ public class FieldSortIT extends ESIntegTestCase { .endObject() .endObject() .endObject() + .startObject("bar") + .field("type", "nested") + .startObject("properties") + .startObject("foo") + .field("type", "text") + .field("fielddata", true) + .startObject("fields") + .startObject("sub") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() .endObject() .endObject() .endObject() @@ -1471,6 +1485,22 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(hits[0].getSortValues()[0], 
is("bar")); assertThat(hits[1].getSortValues()[0], is("abc")); + { + SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, + () -> client().prepareSearch() + .setQuery(matchAllQuery()) + .addSort(SortBuilders + .fieldSort("nested.bar.foo") + .setNestedSort(new NestedSortBuilder("nested") + .setNestedSort(new NestedSortBuilder("nested.bar") + .setMaxChildren(1))) + .order(SortOrder.DESC)) + .get() + ); + assertThat(exc.toString(), + containsString("max_children is only supported on top level of nested sort")); + } + // We sort on nested sub field searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) From a61050378308ec892c823f8cb19e8f364371205c Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Mon, 23 Sep 2019 18:59:46 +0300 Subject: [PATCH 10/94] SQL: Add PIVOT support (#46489) Add initial PIVOT support for transforming a regular table into a statistics table around an arbitrary pivoting column: SELECT * FROM (SELECT languages, country, salary, FROM mp) PIVOT (AVG(salary) FOR countries IN ('NL', 'DE', 'ES', 'RO', 'US')) In the current implementation PIVOT allows only one aggregation however this restriction is likely to be lifted in the future. Also not all aggregations are working, in particular MatrixStats are not yet supported. (cherry picked from commit d91263746a222915c570d4a662ec48c1d6b4f583) --- .../xpack/sql/qa/jdbc/FetchSizeTestCase.java | 57 +- .../sql/qa/src/main/resources/pivot.csv-spec | 206 ++ x-pack/plugin/sql/src/main/antlr/SqlBase.g4 | 25 +- .../plugin/sql/src/main/antlr/SqlBase.tokens | 372 +-- .../sql/src/main/antlr/SqlBaseLexer.tokens | 370 +-- .../xpack/sql/analysis/analyzer/Analyzer.java | 51 +- .../xpack/sql/analysis/analyzer/Verifier.java | 111 +- ...ionCursor.java => CompositeAggCursor.java} | 93 +- ...ggsRowSet.java => CompositeAggRowSet.java} | 40 +- .../sql/execution/search/PivotCursor.java | 74 + .../sql/execution/search/PivotRowSet.java | 139 + .../xpack/sql/execution/search/Querier.java | 40 +- ...Set.java => SchemaCompositeAggRowSet.java} | 6 +- .../search/SchemaDelegatingRowSet.java | 52 + .../sql/execution/search/SourceGenerator.java | 4 + .../search/extractor/BucketExtractors.java | 1 + .../search/extractor/PivotExtractor.java | 71 + .../xpack/sql/expression/Alias.java | 2 +- .../xpack/sql/expression/Attribute.java | 25 +- .../xpack/sql/expression/ExpressionId.java | 4 + .../xpack/sql/expression/Expressions.java | 27 + .../xpack/sql/expression/FieldAttribute.java | 15 +- .../xpack/sql/expression/Literal.java | 2 +- .../sql/expression/LiteralAttribute.java | 10 +- .../sql/expression/UnresolvedAttribute.java | 2 +- .../expression/function/ScoreAttribute.java | 4 +- .../function/aggregate/AggregateFunction.java | 6 +- .../aggregate/AggregateFunctionAttribute.java | 5 +- .../grouping/GroupingFunctionAttribute.java | 4 +- .../scalar/ScalarFunctionAttribute.java | 4 +- .../xpack/sql/optimizer/Optimizer.java | 53 +- .../xpack/sql/parser/LogicalPlanBuilder.java | 51 +- .../xpack/sql/parser/SqlBaseBaseListener.java | 48 + .../xpack/sql/parser/SqlBaseBaseVisitor.java | 28 + .../xpack/sql/parser/SqlBaseLexer.java | 873 +++---- .../xpack/sql/parser/SqlBaseListener.java | 40 + .../xpack/sql/parser/SqlBaseParser.java | 2258 ++++++++++------- .../xpack/sql/parser/SqlBaseVisitor.java | 24 + .../xpack/sql/plan/logical/Aggregate.java | 2 +- .../xpack/sql/plan/logical/Pivot.java | 142 ++ .../xpack/sql/plan/physical/PivotExec.java | 63 + .../xpack/sql/planner/Mapper.java | 7 + .../xpack/sql/planner/QueryFolder.java | 352 +-- 
.../xpack/sql/planner/Verifier.java | 20 +- .../querydsl/container/PivotColumnRef.java | 51 + .../querydsl/container/QueryContainer.java | 44 +- .../xpack/sql/session/Cursors.java | 6 +- .../xpack/sql/session/ListCursor.java | 2 +- .../analyzer/VerifierErrorMessagesTests.java | 55 +- .../CompositeAggregationCursorTests.java | 18 +- .../xpack/sql/optimizer/OptimizerTests.java | 26 +- .../planner/PostOptimizerVerifierTests.java | 77 + .../xpack/sql/planner/QueryFolderTests.java | 17 + 53 files changed, 3966 insertions(+), 2113 deletions(-) create mode 100644 x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec rename x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/{CompositeAggregationCursor.java => CompositeAggCursor.java} (72%) rename x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/{CompositeAggsRowSet.java => CompositeAggRowSet.java} (70%) create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java rename x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/{SchemaCompositeAggsRowSet.java => SchemaCompositeAggRowSet.java} (77%) create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java index 3da3c0ba73b..f12f069a3b3 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java @@ -149,4 +149,59 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase { assertTrue("No more entries left after row " + rs.getRow(), (i+j == 23 || rs.next())); } } -} + + /** + * Explicit pagination test for PIVOT. + * Checks that the paging properly consumes the necessary amount of aggregations and that the + * page size affects the result, not the intermediate query.
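+ * For example (sketch of the assertions below): the pivot result has eight columns, so a fetch size of 3 is promoted to one full pivot row per page, while a fetch size of 20 fits two full rows per page.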
+ */ + public void testPivotPaging() throws Exception { + Request request = new Request("PUT", "/test_pivot/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); + String[] continent = new String[] { "AF", "AS", "EU", "NA", "SA", "AQ", "AU" }; + for (int i = 0; i <= 100; i++) { + bulk.append("{\"index\":{}}\n"); + bulk.append("{\"item\":").append(i % 10) + .append(", \"entry\":").append(i) + .append(", \"amount\" : ").append(randomInt(999)) + .append(", \"location\" : \"").append(continent[i % (continent.length)]).append("\"") + .append("}\n"); + } + request.setJsonEntity(bulk.toString()); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + try (Connection c = esJdbc(); + Statement s = c.createStatement()) { + + String query = "SELECT * FROM " + + "(SELECT item, amount, location FROM test_pivot)" + + " PIVOT (AVG(amount) FOR location IN ( 'AF', 'AS', 'EU', 'NA', 'SA', 'AQ', 'AU') )"; + // set size smaller than an agg page + s.setFetchSize(3); + try (ResultSet rs = s.executeQuery(query)) { + assertEquals(8, rs.getMetaData().getColumnCount()); + for (int i = 0; i < 10; i++) { + assertTrue(rs.next()); + // the page was set to a pivot row (since the initial 3 is lower as a pivot page takes number of pivot entries + 1) + assertEquals(1, rs.getFetchSize()); + assertEquals(Long.valueOf(i), rs.getObject("item")); + } + assertFalse(rs.next()); + } + + // now try with a larger fetch size (8 * 2 + something) - should be 2 + s.setFetchSize(20); + try (ResultSet rs = s.executeQuery(query)) { + for (int i = 0; i < 10; i++) { + assertTrue(rs.next()); + // + assertEquals(2, rs.getFetchSize()); + assertEquals(Long.valueOf(i), rs.getObject("item")); + } + assertFalse(rs.next()); + } + } + assertNoSearchContexts(); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec new file mode 100644 index 00000000000..c7e47a4304b --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec @@ -0,0 +1,206 @@ +averageWithOneValue +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')); + + languages | 'F' +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithAliasAndOneValue +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) AS "AVG" FOR gender IN ('F')); + + languages | 'F' +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithAliasedValue +schema::languages:bt|XX:d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F' AS "XX")); + + languages | XX +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithTwoValues +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')); + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + 
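+// Conceptually (a sketch, not a runnable spec case since no expected table is attached): a pivot such as +// SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) +// computes the same cells as the grouped query +// SELECT languages, gender, AVG(salary) FROM test_emp GROUP BY languages, gender +// with the 'M' and 'F' groups of each languages value folded into two columns of a single row. +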
+averageWithTwoValuesAndAlias +schema::languages:bt|XY:d|XX:d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M' AS "XY", 'F' "XX")); + + languages | XY | XX +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + +averageWithThreeValuesIncludingNull +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')); + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + + +averageWithOneValueAndLimit +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) LIMIT 3; + + languages | 'F' +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +; + +averageWithTwoValuesAndLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 3; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +; + + +averageWithTwoValuesAndTinyLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 1; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +; + + +averageWithTwoValuesAndSmallLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 2; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +; + +averageWithOneValueAndOrder +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; + + languages | 'F' +---------------+------------------ +5 |46705.555555555555 +4 |49291.5 +3 |53660.0 +2 |50684.4 +; + +averageWithTwoValuesAndOrderDesc +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages DESC; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +5 |39052.875 |46705.555555555555 +4 |47058.90909090909|49291.5 +3 |51741.90909090909|53660.0 +2 |44103.90909090909|50684.4 +1 |49767.22222222222|47073.25 +null |48396.28571428572|62140.666666666664 +; + +averageWithTwoValuesAndOrderDescAndLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages DESC LIMIT 2; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +5 |39052.875 |46705.555555555555 +4 |47058.90909090909|49291.5 +; + +averageWithTwoValuesAndOrderAsc +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) 
PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages ASC; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + + +sumWithoutSubquery +schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i +SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; + + birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 +---------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- +null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null +null |10043 |Yishay |M |1990-10-20 00:00:00.0|Tzvieli |34341 |null +null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null +1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null +1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 +1953-01-23 00:00:00.0|10019 |Lillian |null |1999-04-30 00:00:00.0|Haddadi |73717 |null +; + +averageWithOneValueAndMath +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (ROUND(AVG(salary) / 2) FOR gender IN ('F')); + + languages | 'F' +---------------+--------------- +null |31070.0 +1 |23537.0 +2 |25342.0 +3 |26830.0 +4 |24646.0 +5 |23353.0 +; \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index 76af159be90..86c11952498 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -90,7 +90,7 @@ orderBy ; querySpecification - : SELECT setQuantifier? selectItem (',' selectItem)* + : SELECT setQuantifier? selectItems fromClause? (WHERE where=booleanExpression)? (GROUP BY groupBy)? @@ -98,7 +98,7 @@ querySpecification ; fromClause - : FROM relation (',' relation)* + : FROM relation (',' relation)* pivotClause? ; groupBy @@ -123,6 +123,10 @@ setQuantifier | ALL ; +selectItems + : selectItem (',' selectItem)* + ; + selectItem : expression (AS? identifier)? #selectExpression ; @@ -154,6 +158,18 @@ relationPrimary | '(' relation ')' (AS? qualifiedName)? #aliasedRelation ; +pivotClause + : PIVOT '(' aggs=pivotArgs FOR column=qualifiedName IN '(' vals=pivotArgs ')' ')' + ; + +pivotArgs + : namedValueExpression (',' namedValueExpression)* + ; + +namedValueExpression + : valueExpression (AS? identifier)? 
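+ // e.g. AVG(salary) AS "AVG", or a pivoted value such as 'F' AS "XX"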
+ ; + expression : booleanExpression ; @@ -343,6 +359,7 @@ whenClause ; // http://developer.mimer.se/validator/sql-reserved-words.tml +// https://developer.mimer.com/wp-content/uploads/standard-sql-reserved-words-summary.pdf nonReserved : ANALYZE | ANALYZED | CATALOGS | COLUMNS | CURRENT_DATE | CURRENT_TIME | CURRENT_TIMESTAMP @@ -355,7 +372,7 @@ nonReserved | LAST | LIMIT | MAPPED | MINUTE | MONTH | OPTIMIZED - | PARSED | PHYSICAL | PLAN + | PARSED | PHYSICAL | PIVOT | PLAN | QUERY | RLIKE | SCHEMAS | SECOND | SHOW | SYS @@ -397,6 +414,7 @@ EXPLAIN: 'EXPLAIN'; EXTRACT: 'EXTRACT'; FALSE: 'FALSE'; FIRST: 'FIRST'; +FOR: 'FOR'; FORMAT: 'FORMAT'; FROM: 'FROM'; FROZEN: 'FROZEN'; @@ -434,6 +452,7 @@ ORDER: 'ORDER'; OUTER: 'OUTER'; PARSED: 'PARSED'; PHYSICAL: 'PHYSICAL'; +PIVOT: 'PIVOT'; PLAN: 'PLAN'; RIGHT: 'RIGHT'; RLIKE: 'RLIKE'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index 7eeec75f9c9..9771af465bb 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -35,105 +35,107 @@ EXPLAIN=34 EXTRACT=35 FALSE=36 FIRST=37 -FORMAT=38 -FROM=39 -FROZEN=40 -FULL=41 -FUNCTIONS=42 -GRAPHVIZ=43 -GROUP=44 -HAVING=45 -HOUR=46 -HOURS=47 -IN=48 -INCLUDE=49 -INNER=50 -INTERVAL=51 -IS=52 -JOIN=53 -LAST=54 -LEFT=55 -LIKE=56 -LIMIT=57 -MAPPED=58 -MATCH=59 -MINUTE=60 -MINUTES=61 -MONTH=62 -MONTHS=63 -NATURAL=64 -NOT=65 -NULL=66 -NULLS=67 -ON=68 -OPTIMIZED=69 -OR=70 -ORDER=71 -OUTER=72 -PARSED=73 -PHYSICAL=74 -PLAN=75 -RIGHT=76 -RLIKE=77 -QUERY=78 -SCHEMAS=79 -SECOND=80 -SECONDS=81 -SELECT=82 -SHOW=83 -SYS=84 -TABLE=85 -TABLES=86 -TEXT=87 -THEN=88 -TRUE=89 -TO=90 -TYPE=91 -TYPES=92 -USING=93 -VERIFY=94 -WHEN=95 -WHERE=96 -WITH=97 -YEAR=98 -YEARS=99 -ESCAPE_ESC=100 -FUNCTION_ESC=101 -LIMIT_ESC=102 -DATE_ESC=103 -TIME_ESC=104 -TIMESTAMP_ESC=105 -GUID_ESC=106 -ESC_END=107 -EQ=108 -NULLEQ=109 -NEQ=110 -LT=111 -LTE=112 -GT=113 -GTE=114 -PLUS=115 -MINUS=116 -ASTERISK=117 -SLASH=118 -PERCENT=119 -CAST_OP=120 -CONCAT=121 -DOT=122 -PARAM=123 -STRING=124 -INTEGER_VALUE=125 -DECIMAL_VALUE=126 -IDENTIFIER=127 -DIGIT_IDENTIFIER=128 -TABLE_IDENTIFIER=129 -QUOTED_IDENTIFIER=130 -BACKQUOTED_IDENTIFIER=131 -SIMPLE_COMMENT=132 -BRACKETED_COMMENT=133 -WS=134 -UNRECOGNIZED=135 -DELIMITER=136 +FOR=38 +FORMAT=39 +FROM=40 +FROZEN=41 +FULL=42 +FUNCTIONS=43 +GRAPHVIZ=44 +GROUP=45 +HAVING=46 +HOUR=47 +HOURS=48 +IN=49 +INCLUDE=50 +INNER=51 +INTERVAL=52 +IS=53 +JOIN=54 +LAST=55 +LEFT=56 +LIKE=57 +LIMIT=58 +MAPPED=59 +MATCH=60 +MINUTE=61 +MINUTES=62 +MONTH=63 +MONTHS=64 +NATURAL=65 +NOT=66 +NULL=67 +NULLS=68 +ON=69 +OPTIMIZED=70 +OR=71 +ORDER=72 +OUTER=73 +PARSED=74 +PHYSICAL=75 +PIVOT=76 +PLAN=77 +RIGHT=78 +RLIKE=79 +QUERY=80 +SCHEMAS=81 +SECOND=82 +SECONDS=83 +SELECT=84 +SHOW=85 +SYS=86 +TABLE=87 +TABLES=88 +TEXT=89 +THEN=90 +TRUE=91 +TO=92 +TYPE=93 +TYPES=94 +USING=95 +VERIFY=96 +WHEN=97 +WHERE=98 +WITH=99 +YEAR=100 +YEARS=101 +ESCAPE_ESC=102 +FUNCTION_ESC=103 +LIMIT_ESC=104 +DATE_ESC=105 +TIME_ESC=106 +TIMESTAMP_ESC=107 +GUID_ESC=108 +ESC_END=109 +EQ=110 +NULLEQ=111 +NEQ=112 +LT=113 +LTE=114 +GT=115 +GTE=116 +PLUS=117 +MINUS=118 +ASTERISK=119 +SLASH=120 +PERCENT=121 +CAST_OP=122 +CONCAT=123 +DOT=124 +PARAM=125 +STRING=126 +INTEGER_VALUE=127 +DECIMAL_VALUE=128 +IDENTIFIER=129 +DIGIT_IDENTIFIER=130 +TABLE_IDENTIFIER=131 +QUOTED_IDENTIFIER=132 +BACKQUOTED_IDENTIFIER=133 +SIMPLE_COMMENT=134 +BRACKETED_COMMENT=135 +WS=136 +UNRECOGNIZED=137 +DELIMITER=138 '('=1 ')'=2 ','=3 @@ -171,88 +173,90 @@ DELIMITER=136 
'EXTRACT'=35 'FALSE'=36 'FIRST'=37 -'FORMAT'=38 -'FROM'=39 -'FROZEN'=40 -'FULL'=41 -'FUNCTIONS'=42 -'GRAPHVIZ'=43 -'GROUP'=44 -'HAVING'=45 -'HOUR'=46 -'HOURS'=47 -'IN'=48 -'INCLUDE'=49 -'INNER'=50 -'INTERVAL'=51 -'IS'=52 -'JOIN'=53 -'LAST'=54 -'LEFT'=55 -'LIKE'=56 -'LIMIT'=57 -'MAPPED'=58 -'MATCH'=59 -'MINUTE'=60 -'MINUTES'=61 -'MONTH'=62 -'MONTHS'=63 -'NATURAL'=64 -'NOT'=65 -'NULL'=66 -'NULLS'=67 -'ON'=68 -'OPTIMIZED'=69 -'OR'=70 -'ORDER'=71 -'OUTER'=72 -'PARSED'=73 -'PHYSICAL'=74 -'PLAN'=75 -'RIGHT'=76 -'RLIKE'=77 -'QUERY'=78 -'SCHEMAS'=79 -'SECOND'=80 -'SECONDS'=81 -'SELECT'=82 -'SHOW'=83 -'SYS'=84 -'TABLE'=85 -'TABLES'=86 -'TEXT'=87 -'THEN'=88 -'TRUE'=89 -'TO'=90 -'TYPE'=91 -'TYPES'=92 -'USING'=93 -'VERIFY'=94 -'WHEN'=95 -'WHERE'=96 -'WITH'=97 -'YEAR'=98 -'YEARS'=99 -'{ESCAPE'=100 -'{FN'=101 -'{LIMIT'=102 -'{D'=103 -'{T'=104 -'{TS'=105 -'{GUID'=106 -'}'=107 -'='=108 -'<=>'=109 -'<'=111 -'<='=112 -'>'=113 -'>='=114 -'+'=115 -'-'=116 -'*'=117 -'/'=118 -'%'=119 -'::'=120 -'||'=121 -'.'=122 -'?'=123 +'FOR'=38 +'FORMAT'=39 +'FROM'=40 +'FROZEN'=41 +'FULL'=42 +'FUNCTIONS'=43 +'GRAPHVIZ'=44 +'GROUP'=45 +'HAVING'=46 +'HOUR'=47 +'HOURS'=48 +'IN'=49 +'INCLUDE'=50 +'INNER'=51 +'INTERVAL'=52 +'IS'=53 +'JOIN'=54 +'LAST'=55 +'LEFT'=56 +'LIKE'=57 +'LIMIT'=58 +'MAPPED'=59 +'MATCH'=60 +'MINUTE'=61 +'MINUTES'=62 +'MONTH'=63 +'MONTHS'=64 +'NATURAL'=65 +'NOT'=66 +'NULL'=67 +'NULLS'=68 +'ON'=69 +'OPTIMIZED'=70 +'OR'=71 +'ORDER'=72 +'OUTER'=73 +'PARSED'=74 +'PHYSICAL'=75 +'PIVOT'=76 +'PLAN'=77 +'RIGHT'=78 +'RLIKE'=79 +'QUERY'=80 +'SCHEMAS'=81 +'SECOND'=82 +'SECONDS'=83 +'SELECT'=84 +'SHOW'=85 +'SYS'=86 +'TABLE'=87 +'TABLES'=88 +'TEXT'=89 +'THEN'=90 +'TRUE'=91 +'TO'=92 +'TYPE'=93 +'TYPES'=94 +'USING'=95 +'VERIFY'=96 +'WHEN'=97 +'WHERE'=98 +'WITH'=99 +'YEAR'=100 +'YEARS'=101 +'{ESCAPE'=102 +'{FN'=103 +'{LIMIT'=104 +'{D'=105 +'{T'=106 +'{TS'=107 +'{GUID'=108 +'}'=109 +'='=110 +'<=>'=111 +'<'=113 +'<='=114 +'>'=115 +'>='=116 +'+'=117 +'-'=118 +'*'=119 +'/'=120 +'%'=121 +'::'=122 +'||'=123 +'.'=124 +'?'=125 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index 603e67fec88..adb6142e865 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -35,104 +35,106 @@ EXPLAIN=34 EXTRACT=35 FALSE=36 FIRST=37 -FORMAT=38 -FROM=39 -FROZEN=40 -FULL=41 -FUNCTIONS=42 -GRAPHVIZ=43 -GROUP=44 -HAVING=45 -HOUR=46 -HOURS=47 -IN=48 -INCLUDE=49 -INNER=50 -INTERVAL=51 -IS=52 -JOIN=53 -LAST=54 -LEFT=55 -LIKE=56 -LIMIT=57 -MAPPED=58 -MATCH=59 -MINUTE=60 -MINUTES=61 -MONTH=62 -MONTHS=63 -NATURAL=64 -NOT=65 -NULL=66 -NULLS=67 -ON=68 -OPTIMIZED=69 -OR=70 -ORDER=71 -OUTER=72 -PARSED=73 -PHYSICAL=74 -PLAN=75 -RIGHT=76 -RLIKE=77 -QUERY=78 -SCHEMAS=79 -SECOND=80 -SECONDS=81 -SELECT=82 -SHOW=83 -SYS=84 -TABLE=85 -TABLES=86 -TEXT=87 -THEN=88 -TRUE=89 -TO=90 -TYPE=91 -TYPES=92 -USING=93 -VERIFY=94 -WHEN=95 -WHERE=96 -WITH=97 -YEAR=98 -YEARS=99 -ESCAPE_ESC=100 -FUNCTION_ESC=101 -LIMIT_ESC=102 -DATE_ESC=103 -TIME_ESC=104 -TIMESTAMP_ESC=105 -GUID_ESC=106 -ESC_END=107 -EQ=108 -NULLEQ=109 -NEQ=110 -LT=111 -LTE=112 -GT=113 -GTE=114 -PLUS=115 -MINUS=116 -ASTERISK=117 -SLASH=118 -PERCENT=119 -CAST_OP=120 -CONCAT=121 -DOT=122 -PARAM=123 -STRING=124 -INTEGER_VALUE=125 -DECIMAL_VALUE=126 -IDENTIFIER=127 -DIGIT_IDENTIFIER=128 -TABLE_IDENTIFIER=129 -QUOTED_IDENTIFIER=130 -BACKQUOTED_IDENTIFIER=131 -SIMPLE_COMMENT=132 -BRACKETED_COMMENT=133 -WS=134 -UNRECOGNIZED=135 +FOR=38 +FORMAT=39 +FROM=40 +FROZEN=41 +FULL=42 
+FUNCTIONS=43 +GRAPHVIZ=44 +GROUP=45 +HAVING=46 +HOUR=47 +HOURS=48 +IN=49 +INCLUDE=50 +INNER=51 +INTERVAL=52 +IS=53 +JOIN=54 +LAST=55 +LEFT=56 +LIKE=57 +LIMIT=58 +MAPPED=59 +MATCH=60 +MINUTE=61 +MINUTES=62 +MONTH=63 +MONTHS=64 +NATURAL=65 +NOT=66 +NULL=67 +NULLS=68 +ON=69 +OPTIMIZED=70 +OR=71 +ORDER=72 +OUTER=73 +PARSED=74 +PHYSICAL=75 +PIVOT=76 +PLAN=77 +RIGHT=78 +RLIKE=79 +QUERY=80 +SCHEMAS=81 +SECOND=82 +SECONDS=83 +SELECT=84 +SHOW=85 +SYS=86 +TABLE=87 +TABLES=88 +TEXT=89 +THEN=90 +TRUE=91 +TO=92 +TYPE=93 +TYPES=94 +USING=95 +VERIFY=96 +WHEN=97 +WHERE=98 +WITH=99 +YEAR=100 +YEARS=101 +ESCAPE_ESC=102 +FUNCTION_ESC=103 +LIMIT_ESC=104 +DATE_ESC=105 +TIME_ESC=106 +TIMESTAMP_ESC=107 +GUID_ESC=108 +ESC_END=109 +EQ=110 +NULLEQ=111 +NEQ=112 +LT=113 +LTE=114 +GT=115 +GTE=116 +PLUS=117 +MINUS=118 +ASTERISK=119 +SLASH=120 +PERCENT=121 +CAST_OP=122 +CONCAT=123 +DOT=124 +PARAM=125 +STRING=126 +INTEGER_VALUE=127 +DECIMAL_VALUE=128 +IDENTIFIER=129 +DIGIT_IDENTIFIER=130 +TABLE_IDENTIFIER=131 +QUOTED_IDENTIFIER=132 +BACKQUOTED_IDENTIFIER=133 +SIMPLE_COMMENT=134 +BRACKETED_COMMENT=135 +WS=136 +UNRECOGNIZED=137 '('=1 ')'=2 ','=3 @@ -170,88 +172,90 @@ UNRECOGNIZED=135 'EXTRACT'=35 'FALSE'=36 'FIRST'=37 -'FORMAT'=38 -'FROM'=39 -'FROZEN'=40 -'FULL'=41 -'FUNCTIONS'=42 -'GRAPHVIZ'=43 -'GROUP'=44 -'HAVING'=45 -'HOUR'=46 -'HOURS'=47 -'IN'=48 -'INCLUDE'=49 -'INNER'=50 -'INTERVAL'=51 -'IS'=52 -'JOIN'=53 -'LAST'=54 -'LEFT'=55 -'LIKE'=56 -'LIMIT'=57 -'MAPPED'=58 -'MATCH'=59 -'MINUTE'=60 -'MINUTES'=61 -'MONTH'=62 -'MONTHS'=63 -'NATURAL'=64 -'NOT'=65 -'NULL'=66 -'NULLS'=67 -'ON'=68 -'OPTIMIZED'=69 -'OR'=70 -'ORDER'=71 -'OUTER'=72 -'PARSED'=73 -'PHYSICAL'=74 -'PLAN'=75 -'RIGHT'=76 -'RLIKE'=77 -'QUERY'=78 -'SCHEMAS'=79 -'SECOND'=80 -'SECONDS'=81 -'SELECT'=82 -'SHOW'=83 -'SYS'=84 -'TABLE'=85 -'TABLES'=86 -'TEXT'=87 -'THEN'=88 -'TRUE'=89 -'TO'=90 -'TYPE'=91 -'TYPES'=92 -'USING'=93 -'VERIFY'=94 -'WHEN'=95 -'WHERE'=96 -'WITH'=97 -'YEAR'=98 -'YEARS'=99 -'{ESCAPE'=100 -'{FN'=101 -'{LIMIT'=102 -'{D'=103 -'{T'=104 -'{TS'=105 -'{GUID'=106 -'}'=107 -'='=108 -'<=>'=109 -'<'=111 -'<='=112 -'>'=113 -'>='=114 -'+'=115 -'-'=116 -'*'=117 -'/'=118 -'%'=119 -'::'=120 -'||'=121 -'.'=122 -'?'=123 +'FOR'=38 +'FORMAT'=39 +'FROM'=40 +'FROZEN'=41 +'FULL'=42 +'FUNCTIONS'=43 +'GRAPHVIZ'=44 +'GROUP'=45 +'HAVING'=46 +'HOUR'=47 +'HOURS'=48 +'IN'=49 +'INCLUDE'=50 +'INNER'=51 +'INTERVAL'=52 +'IS'=53 +'JOIN'=54 +'LAST'=55 +'LEFT'=56 +'LIKE'=57 +'LIMIT'=58 +'MAPPED'=59 +'MATCH'=60 +'MINUTE'=61 +'MINUTES'=62 +'MONTH'=63 +'MONTHS'=64 +'NATURAL'=65 +'NOT'=66 +'NULL'=67 +'NULLS'=68 +'ON'=69 +'OPTIMIZED'=70 +'OR'=71 +'ORDER'=72 +'OUTER'=73 +'PARSED'=74 +'PHYSICAL'=75 +'PIVOT'=76 +'PLAN'=77 +'RIGHT'=78 +'RLIKE'=79 +'QUERY'=80 +'SCHEMAS'=81 +'SECOND'=82 +'SECONDS'=83 +'SELECT'=84 +'SHOW'=85 +'SYS'=86 +'TABLE'=87 +'TABLES'=88 +'TEXT'=89 +'THEN'=90 +'TRUE'=91 +'TO'=92 +'TYPE'=93 +'TYPES'=94 +'USING'=95 +'VERIFY'=96 +'WHEN'=97 +'WHERE'=98 +'WITH'=99 +'YEAR'=100 +'YEARS'=101 +'{ESCAPE'=102 +'{FN'=103 +'{LIMIT'=104 +'{D'=105 +'{T'=106 +'{TS'=107 +'{GUID'=108 +'}'=109 +'='=110 +'<=>'=111 +'<'=113 +'<='=114 +'>'=115 +'>='=116 +'+'=117 +'-'=118 +'*'=119 +'/'=120 +'%'=121 +'::'=122 +'||'=123 +'.'=124 +'?'=125 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 901318258c0..5fdd1f9124d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Join; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnaryPlan; @@ -419,7 +420,7 @@ public class Analyzer extends RuleExecutor { return result; } - private List expandStar(UnresolvedStar us, List output) { + static List expandStar(UnresolvedStar us, List output) { List expanded = new ArrayList<>(); // a qualifier is specified - since this is a star, it should be a CompoundDataType @@ -460,24 +461,7 @@ public class Analyzer extends RuleExecutor { } } } else { - // add only primitives - // but filter out multi fields (allow only the top-level value) - Set seenMultiFields = new LinkedHashSet<>(); - - for (Attribute a : output) { - if (!DataTypes.isUnsupported(a.dataType()) && a.dataType().isPrimitive()) { - if (a instanceof FieldAttribute) { - FieldAttribute fa = (FieldAttribute) a; - // skip nested fields and seen multi-fields - if (!fa.isNested() && !seenMultiFields.contains(fa.parent())) { - expanded.add(a); - seenMultiFields.add(a); - } - } else { - expanded.add(a); - } - } - } + expanded.addAll(Expressions.onlyPrimitiveFieldAttributes(output)); } return expanded; @@ -954,12 +938,24 @@ public class Analyzer extends RuleExecutor { } return a; } + if (plan instanceof Pivot) { + Pivot p = (Pivot) plan; + if (p.childrenResolved()) { + if (hasUnresolvedAliases(p.values())) { + p = new Pivot(p.source(), p.child(), p.column(), assignAliases(p.values()), p.aggregates()); + } + if (hasUnresolvedAliases(p.aggregates())) { + p = new Pivot(p.source(), p.child(), p.column(), p.values(), assignAliases(p.aggregates())); + } + } + return p; + } return plan; } private boolean hasUnresolvedAliases(List expressions) { - return expressions != null && expressions.stream().anyMatch(e -> e instanceof UnresolvedAlias); + return expressions != null && Expressions.anyMatch(expressions, e -> e instanceof UnresolvedAlias); } private List assignAliases(List exprs) { @@ -1277,13 +1273,20 @@ protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project) { Project p = (Project) plan; - return new Project(p.source(), p.child(), cleanSecondaryAliases(p.projections())); + return new Project(p.source(), p.child(), cleanChildrenAliases(p.projections())); } if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; - // clean group expressions - return new Aggregate(a.source(), a.child(), cleanAllAliases(a.groupings()), cleanSecondaryAliases(a.aggregates())); + // aliases inside GROUP BY are irrelevant so remove all of them + // however aggregations are important (ultimately a projection) + return new Aggregate(a.source(), a.child(), cleanAllAliases(a.groupings()), cleanChildrenAliases(a.aggregates())); + } + + if (plan instanceof Pivot) { + Pivot p = (Pivot) plan; + return new Pivot(p.source(), p.child(), trimAliases(p.column()), cleanChildrenAliases(p.values()), + cleanChildrenAliases(p.aggregates())); } return plan.transformExpressionsOnly(e -> { @@ -1294,7 +1297,7 @@ }); } - private List
cleanSecondaryAliases(List args) { + private List cleanChildrenAliases(List args) { List cleaned = new ArrayList<>(args.size()); for (NamedExpression ne : args) { cleaned.add((NamedExpression) trimNonTopLevelAliases(ne)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index d5a4cb436e6..5c4b89209fa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -13,6 +13,8 @@ import org.elasticsearch.xpack.sql.expression.Exists; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; @@ -33,13 +35,16 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.stats.FeatureMetric; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.tree.Node; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.util.Holder; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.ArrayList; @@ -64,6 +69,7 @@ import static org.elasticsearch.xpack.sql.stats.FeatureMetric.LOCAL; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.ORDERBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; /** * The verifier has the role of checking the analyzed tree for failures and build a list of failures following this check. @@ -237,6 +243,7 @@ public final class Verifier { checkForScoreInsideFunctions(p, localFailures); checkNestedUsedInGroupByOrHaving(p, localFailures); checkForGeoFunctionsOnDocValues(p, localFailures); + checkPivot(p, localFailures); // everything checks out // mark the plan as analyzed @@ -464,20 +471,39 @@ public final class Verifier { private static boolean checkGroupByInexactField(LogicalPlan p, Set localFailures) { if (p instanceof Aggregate) { - Aggregate a = (Aggregate) p; - - // The grouping can not be an aggregate function or an inexact field (e.g. 
text without a keyword) - a.groupings().forEach(e -> e.forEachUp(c -> { - EsField.Exact exact = c.getExactInfo(); - if (exact.hasExact() == false) { - localFailures.add(fail(c, "Field [" + c.sourceText() + "] of data type [" + c.dataType().typeName + "] " + - "cannot be used for grouping; " + exact.errorMsg())); - } - }, FieldAttribute.class)); + return onlyExactFields(((Aggregate) p).groupings(), localFailures); } return true; } + // The grouping can not be an aggregate function or an inexact field (e.g. text without a keyword) + private static boolean onlyExactFields(List expressions, Set localFailures) { + Holder onlyExact = new Holder<>(Boolean.TRUE); + + expressions.forEach(e -> e.forEachUp(c -> { + EsField.Exact exact = c.getExactInfo(); + if (exact.hasExact() == false) { + localFailures.add(fail(c, "Field [{}] of data type [{}] cannot be used for grouping; {}", c.sourceText(), + c.dataType().typeName, exact.errorMsg())); + onlyExact.set(Boolean.FALSE); + } + }, FieldAttribute.class)); + + return onlyExact.get(); + } + + private static boolean onlyRawFields(Iterable expressions, Set localFailures) { + Holder onlyExact = new Holder<>(Boolean.TRUE); + + expressions.forEach(e -> e.forEachDown(c -> { + if (c instanceof Function || c instanceof FunctionAttribute) { + localFailures.add(fail(c, "No functions allowed (yet); encountered [{}]", c.sourceText())); + onlyExact.set(Boolean.FALSE); + } + })); + return onlyExact.get(); + } + private static boolean checkGroupByTime(LogicalPlan p, Set localFailures) { if (p instanceof Aggregate) { Aggregate a = (Aggregate) p; @@ -625,8 +651,9 @@ public final class Verifier { Project proj = (Project) p; proj.projections().forEach(e -> e.forEachDown(f -> localFailures.add(fail(f, "[{}] needs to be part of the grouping", Expressions.name(f))), GroupingFunction.class)); - } else if (p instanceof Aggregate) { - // if it does have a GROUP BY, check if the groupings contain the grouping functions (Histograms) + } + // if it does have a GROUP BY, check if the groupings contain the grouping functions (Histograms) + else if (p instanceof Aggregate) { Aggregate a = (Aggregate) p; a.aggregates().forEach(agg -> agg.forEachDown(e -> { if (a.groupings().size() == 0 @@ -749,4 +776,62 @@ public final class Verifier { } }, FieldAttribute.class)), OrderBy.class); } -} + + private static void checkPivot(LogicalPlan p, Set localFailures) { + p.forEachDown(pv -> { + // check only exact fields are used inside PIVOTing + if (onlyExactFields(combine(pv.groupingSet(), pv.column()), localFailures) == false + || onlyRawFields(pv.groupingSet(), localFailures) == false) { + // if that is not the case, no need to do further validation since the declaration is fundamentally wrong + return; + } + + // check values + DataType colType = pv.column().dataType(); + for (NamedExpression v : pv.values()) { + // check all values are foldable + Expression ex = v instanceof Alias ? 
((Alias) v).child() : v; + if (ex instanceof Literal == false) { + localFailures.add(fail(v, "Non-literal [{}] found inside PIVOT values", v.name())); + } + else if (ex.foldable() && ex.fold() == null) { + localFailures.add(fail(v, "Null not allowed as a PIVOT value", v.name())); + } + // and that their type is compatible with that of the column + else if (DataTypes.areTypesCompatible(colType, v.dataType()) == false) { + localFailures.add(fail(v, "Literal [{}] of type [{}] does not match type [{}] of PIVOT column [{}]", v.name(), + v.dataType().typeName, colType.typeName, pv.column().sourceText())); + } + } + + // check aggregate function, in particular formulas that might hide literals or scalars + pv.aggregates().forEach(a -> { + Holder hasAggs = new Holder<>(Boolean.FALSE); + List aggs = a.collectFirstChildren(c -> { + // skip aggregate functions + if (Functions.isAggregate(c)) { + hasAggs.set(Boolean.TRUE); + return true; + } + if (c.children().isEmpty()) { + return true; + } + return false; + }); + + if (Boolean.FALSE.equals(hasAggs.get())) { + localFailures.add(fail(a, "No aggregate function found in PIVOT at [{}]", a.sourceText())); + } + // check mixture of Agg and column (wrapped in scalar) + else { + for (Expression agg : aggs) { + if (agg instanceof FieldAttribute) { + localFailures.add(fail(a, "Non-aggregate function found in PIVOT at [{}]", a.sourceText())); + } + } + } + }); + + }, Pivot.class); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java similarity index 72% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java index 41b5e1199ef..616ca01c38f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java @@ -37,14 +37,14 @@ import java.util.BitSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Consumer; -import java.util.function.Function; +import java.util.function.BiFunction; +import java.util.function.Supplier; /** * Cursor for composite aggregation (GROUP BY). * Stores the query that gets updated/slides across requests. */ -public class CompositeAggregationCursor implements Cursor { +public class CompositeAggCursor implements Cursor { private final Logger log = LogManager.getLogger(getClass()); @@ -57,7 +57,7 @@ public class CompositeAggregationCursor implements Cursor { private final int limit; private final boolean includeFrozen; - CompositeAggregationCursor(byte[] next, List exts, BitSet mask, int remainingLimit, boolean includeFrozen, + CompositeAggCursor(byte[] next, List exts, BitSet mask, int remainingLimit, boolean includeFrozen, String... 
indices) { this.indices = indices; this.nextQuery = next; @@ -67,7 +67,7 @@ public class CompositeAggregationCursor implements Cursor { this.includeFrozen = includeFrozen; } - public CompositeAggregationCursor(StreamInput in) throws IOException { + public CompositeAggCursor(StreamInput in) throws IOException { indices = in.readStringArray(); nextQuery = in.readByteArray(); limit = in.readVInt(); @@ -86,7 +86,6 @@ public class CompositeAggregationCursor implements Cursor { out.writeNamedWriteableList(extractors); out.writeByteArray(mask.toByteArray()); out.writeBoolean(includeFrozen); - } @Override @@ -133,16 +132,17 @@ public class CompositeAggregationCursor implements Cursor { log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest search = Querier.prepareRequest(client, query, cfg.pageTimeout(), includeFrozen, indices); + SearchRequest request = Querier.prepareRequest(client, query, cfg.pageTimeout(), includeFrozen, indices); - client.search(search, new ActionListener() { + client.search(request, new ActionListener() { @Override - public void onResponse(SearchResponse r) { - handle(r, search.source(), ba -> new CompositeAggsRowSet(extractors, mask, r, limit, ba), - () -> client.search(search, this), - p -> listener.onResponse(p), - e -> listener.onFailure(e), - Schema.EMPTY, includeFrozen, indices); + public void onResponse(SearchResponse response) { + handle(response, request.source(), + makeRowSet(response), + makeCursor(), + () -> client.search(request, this), + listener, + Schema.EMPTY); } @Override @@ -152,40 +152,55 @@ public class CompositeAggregationCursor implements Cursor { }); } - static void handle(SearchResponse response, SearchSourceBuilder source, Function makeRowSet, - Runnable retry, Consumer onPage, Consumer onFailure, - Schema schema, boolean includeFrozen, String[] indices) { + protected Supplier makeRowSet(SearchResponse response) { + return () -> new CompositeAggRowSet(extractors, mask, response, limit); + } + + protected BiFunction makeCursor() { + return (q, r) -> new CompositeAggCursor(q, r.extractors(), r.mask(), r.remainingData(), includeFrozen, indices); + } + + static void handle(SearchResponse response, SearchSourceBuilder source, + Supplier makeRowSet, + BiFunction makeCursor, + Runnable retry, + ActionListener listener, + Schema schema) { // there are some results if (response.getAggregations().asList().isEmpty() == false) { // retry - if (CompositeAggregationCursor.shouldRetryDueToEmptyPage(response)) { - CompositeAggregationCursor.updateCompositeAfterKey(response, source); + if (shouldRetryDueToEmptyPage(response)) { + updateCompositeAfterKey(response, source); retry.run(); return; } try { - boolean hasAfterKey = updateCompositeAfterKey(response, source); - byte[] queryAsBytes = hasAfterKey ? serializeQuery(source) : null; - CompositeAggsRowSet rowSet = makeRowSet.apply(queryAsBytes); + CompositeAggRowSet rowSet = makeRowSet.get(); + Map afterKey = rowSet.afterKey(); + + byte[] queryAsBytes = null; + if (afterKey != null) { + updateSourceAfterKey(afterKey, source); + queryAsBytes = serializeQuery(source); + } Cursor next = rowSet.remainingData() == 0 ? 
Cursor.EMPTY - : new CompositeAggregationCursor(queryAsBytes, rowSet.extractors(), rowSet.mask(), - rowSet.remainingData(), includeFrozen, indices); - onPage.accept(new Page(rowSet, next)); + : makeCursor.apply(queryAsBytes, rowSet); + listener.onResponse(new Page(rowSet, next)); } catch (Exception ex) { - onFailure.accept(ex); + listener.onFailure(ex); } } // no results else { - onPage.accept(Page.last(Rows.empty(schema))); + listener.onResponse(Page.last(Rows.empty(schema))); } } - static boolean shouldRetryDueToEmptyPage(SearchResponse response) { + private static boolean shouldRetryDueToEmptyPage(SearchResponse response) { CompositeAggregation composite = getComposite(response); // if there are no buckets but a next page, go fetch it instead of sending an empty response to the client return composite != null && composite.getBuckets().isEmpty() && composite.afterKey() != null && !composite.afterKey().isEmpty(); @@ -204,25 +219,22 @@ public class CompositeAggregationCursor implements Cursor { throw new SqlIllegalArgumentException("Unrecognized root group found; {}", agg.getClass()); } - static boolean updateCompositeAfterKey(SearchResponse r, SearchSourceBuilder next) { + private static void updateCompositeAfterKey(SearchResponse r, SearchSourceBuilder search) { CompositeAggregation composite = getComposite(r); if (composite == null) { throw new SqlIllegalArgumentException("Invalid server response; no group-by detected"); } - Map afterKey = composite.afterKey(); - // a null after-key means done - if (afterKey == null) { - return false; + updateSourceAfterKey(composite.afterKey(), search); } - AggregationBuilder aggBuilder = next.aggregations().getAggregatorFactories().iterator().next(); + private static void updateSourceAfterKey(Map afterKey, SearchSourceBuilder search) { + AggregationBuilder aggBuilder = search.aggregations().getAggregatorFactories().iterator().next(); // update after-key with the new value if (aggBuilder instanceof CompositeAggregationBuilder) { CompositeAggregationBuilder comp = (CompositeAggregationBuilder) aggBuilder; comp.aggregateAfter(afterKey); - return true; } else { throw new SqlIllegalArgumentException("Invalid client request; expected a group-by but instead got {}", aggBuilder); } @@ -240,7 +252,7 @@ public class CompositeAggregationCursor implements Cursor { /** * Serializes the search source to a byte array. 
*/ - static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { + private static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { if (source == null) { return new byte[0]; } @@ -259,7 +271,7 @@ public class CompositeAggregationCursor implements Cursor { @Override public int hashCode() { - return Objects.hash(Arrays.hashCode(indices), Arrays.hashCode(nextQuery), extractors, limit); + return Objects.hash(Arrays.hashCode(indices), Arrays.hashCode(nextQuery), extractors, limit, mask, includeFrozen); } @Override @@ -267,15 +279,16 @@ public class CompositeAggregationCursor implements Cursor { if (obj == null || obj.getClass() != getClass()) { return false; } - CompositeAggregationCursor other = (CompositeAggregationCursor) obj; + CompositeAggCursor other = (CompositeAggCursor) obj; return Arrays.equals(indices, other.indices) && Arrays.equals(nextQuery, other.nextQuery) && Objects.equals(extractors, other.extractors) - && Objects.equals(limit, other.limit); + && Objects.equals(limit, other.limit) + && Objects.equals(includeFrozen, other.includeFrozen); } @Override public String toString() { return "cursor for composite on index [" + Arrays.toString(indices) + "]"; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java similarity index 70% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java index dd6b85279cb..1262e80e066 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java @@ -12,50 +12,50 @@ import org.elasticsearch.xpack.sql.session.RowSet; import java.util.BitSet; import java.util.List; +import java.util.Map; import static java.util.Collections.emptyList; /** * {@link RowSet} specific to (GROUP BY) aggregation. */ -class CompositeAggsRowSet extends ResultRowSet { +class CompositeAggRowSet extends ResultRowSet { - private final List buckets; - private final int remainingData; - private final int size; - private int row = 0; + final List buckets; - CompositeAggsRowSet(List exts, BitSet mask, SearchResponse response, int limit, byte[] next) { + Map afterKey; + int remainingData; + int size; + int row = 0; + + CompositeAggRowSet(List exts, BitSet mask, SearchResponse response, int limit) { super(exts, mask); - CompositeAggregation composite = CompositeAggregationCursor.getComposite(response); + CompositeAggregation composite = CompositeAggCursor.getComposite(response); if (composite != null) { buckets = composite.getBuckets(); + afterKey = composite.afterKey(); } else { buckets = emptyList(); + afterKey = null; } // page size size = limit == -1 ? 
buckets.size() : Math.min(buckets.size(), limit); + remainingData = remainingData(afterKey != null, size, limit); + } - if (next == null) { - remainingData = 0; + static int remainingData(boolean hasNextPage, int size, int limit) { + if (hasNextPage == false) { + return 0; } else { - // Compute remaining limit - - // If the limit is -1 then we have a local sorting (sort on aggregate function) that requires all the buckets - // to be processed so we stop only when all data is exhausted. int remainingLimit = (limit == -1) ? limit : ((limit - size) >= 0 ? (limit - size) : 0); // if the computed limit is zero, or the size is zero it means either there's nothing left or the limit has been reached // note that a composite agg might be valid but return zero groups (since these can be filtered with HAVING/bucket selector) // however the Querier takes care of that and keeps making requests until either the query is invalid or at least one response // is returned. - if (size == 0 || remainingLimit == 0) { - remainingData = 0; - } else { - remainingData = remainingLimit; - } + return size == 0 ? size : remainingLimit; } } @@ -91,4 +91,8 @@ class CompositeAggsRowSet extends ResultRowSet { int remainingData() { return remainingData; } + + Map afterKey() { + return afterKey; + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java new file mode 100644 index 00000000000..a815602d950 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; +import org.elasticsearch.xpack.sql.type.Schema; + +import java.io.IOException; +import java.util.Arrays; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +public class PivotCursor extends CompositeAggCursor { + + public static final String NAME = "p"; + + private final Map previousKey; + + PivotCursor(Map previousKey, byte[] next, List exts, BitSet mask, int remainingLimit, + boolean includeFrozen, + String... indices) { + super(next, exts, mask, remainingLimit, includeFrozen, indices); + this.previousKey = previousKey; + } + + public PivotCursor(StreamInput in) throws IOException { + super(in); + previousKey = in.readBoolean() == true ? 
in.readMap() : null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (previousKey != null) { + out.writeBoolean(true); + out.writeMap(previousKey); + } else { + out.writeBoolean(false); + } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Supplier makeRowSet(SearchResponse response) { + return () -> new PivotRowSet(Schema.EMPTY, extractors(), mask(), response, limit(), previousKey); + } + + @Override + protected BiFunction makeCursor() { + return (q, r) -> { + Map lastAfterKey = r instanceof PivotRowSet ? ((PivotRowSet) r).lastAfterKey() : null; + return new PivotCursor(lastAfterKey, q, r.extractors(), r.mask(), r.remainingData(), includeFrozen(), indices()); + }; + } + + @Override + public String toString() { + return "pivot for index [" + Arrays.toString(indices()) + "]"; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java new file mode 100644 index 00000000000..6839e7275ae --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; +import org.elasticsearch.xpack.sql.type.Schema; + +import java.util.ArrayList; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; + +import static java.util.Collections.emptyList; + +class PivotRowSet extends SchemaCompositeAggRowSet { + + private final List data; + private final Map lastAfterKey; + + PivotRowSet(Schema schema, List exts, BitSet mask, SearchResponse response, int limit, + Map previousLastKey) { + super(schema, exts, mask, response, limit); + + data = buckets.isEmpty() ? emptyList() : new ArrayList<>(); + + // the last page contains no data, handle that to avoid NPEs and such + if (buckets.isEmpty()) { + lastAfterKey = null; + return; + } + + // consume buckets until all pivot columns are initialized or the next grouping starts + // to determine a group, find all group-by extractors (CompositeKeyExtractor) + // extract their values and keep iterating through the buckets as long as the result is the same + + Map currentRowGroupKey = null; + Map lastCompletedGroupKey = null; + Object[] currentRow = new Object[columnCount()]; + + for (int bucketIndex = 0; bucketIndex < buckets.size(); bucketIndex++) { + CompositeAggregation.Bucket bucket = buckets.get(bucketIndex); + Map key = bucket.getKey(); + + // does the bucket belong to the same group?
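+ // For example (sketch): pivoting AVG(salary) on gender IN ('M', 'F') grouped by languages, the composite agg emits one bucket per (languages, gender) pair; buckets {1, 'M'} and {1, 'F'} fill the two pivot cells of the languages=1 row, while a following bucket {2, 'M'} starts a new row.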
+ if (currentRowGroupKey == null || sameCompositeKey(currentRowGroupKey, key)) { + currentRowGroupKey = key; + } + // done computing row + else { + // be sure to remember the last consumed group before changing to the new one + lastCompletedGroupKey = currentRowGroupKey; + currentRowGroupKey = key; + // save the data + data.add(currentRow); + // create a new row + currentRow = new Object[columnCount()]; + } + + for (int columnIndex = 0; columnIndex < currentRow.length; columnIndex++) { + BucketExtractor extractor = userExtractor(columnIndex); + Object value = extractor.extract(bucket); + + // rerun the bucket through all the extractors but update only the non-null components + // since the pivot extractors will react only when encountering the matching group + if (currentRow[columnIndex] == null && value != null) { + currentRow[columnIndex] = value; + } + } + } + + // add the last group if any of the following matches: + // a. the last key has been sent before (it's the last page) + if ((previousLastKey != null && sameCompositeKey(previousLastKey, currentRowGroupKey))) { + data.add(currentRow); + afterKey = null; + } + // b. all the values are initialized (there might be another page but no need to ask for the group again) + // c. or no data was added (typically because there's a null value such as the group) + else if (hasNull(currentRow) == false || data.isEmpty()) { + data.add(currentRow); + afterKey = currentRowGroupKey; + } + // otherwise we can't tell whether it's complete or not + // so discard the last group and ask for it on the next page + else { + afterKey = lastCompletedGroupKey; + } + + // lastly initialize the size and remainingData + size = data.size(); + remainingData = remainingData(afterKey != null, size, limit); + lastAfterKey = currentRowGroupKey; + } + + private boolean hasNull(Object[] currentRow) { + for (Object object : currentRow) { + if (object == null) { + return true; + } + } + return false; + } + + // compare the equality of two composite keys WITHOUT the last group + // this method relies on the internal map implementation which preserves the key position + // hence why the comparison happens against the current key (not the previous one which might + // have a different order due to serialization) + static boolean sameCompositeKey(Map previous, Map current) { + int keys = current.size() - 1; + int keyIndex = 0; + for (Entry entry : current.entrySet()) { + if (keyIndex++ >= keys) { + return true; + } + if (Objects.equals(entry.getValue(), previous.get(entry.getKey())) == false) { + return false; + } + } + // there's no other key, it's the same group + return true; + } + + @Override + protected Object getColumn(int column) { + return data.get(row)[column]; + } + + Map lastAfterKey() { + return lastAfterKey; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 9e0d4f3a691..333d320e908 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import
org.elasticsearch.xpack.sql.execution.search.extractor.MetricAggExtractor; +import org.elasticsearch.xpack.sql.execution.search.extractor.PivotExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.TopHitsAggExtractor; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.ExpressionId; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef; import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef; import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef; +import org.elasticsearch.xpack.sql.querydsl.container.PivotColumnRef; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef; import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef; @@ -71,9 +73,12 @@ import java.util.BitSet; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; +import java.util.function.Supplier; import static java.util.Collections.singletonList; import static org.elasticsearch.action.ActionListener.wrap; @@ -320,21 +325,39 @@ public class Querier { */ static class CompositeActionListener extends BaseAggActionListener { + private final boolean isPivot; + CompositeActionListener(ActionListener listener, Client client, Configuration cfg, List output, QueryContainer query, SearchRequest request) { super(listener, client, cfg, output, query, request); + + isPivot = query.fields().stream().anyMatch(t -> t.v1() instanceof PivotColumnRef); } @Override protected void handleResponse(SearchResponse response, ActionListener listener) { - CompositeAggregationCursor.handle(response, request.source(), - ba -> new SchemaCompositeAggsRowSet(schema, initBucketExtractors(response), mask, response, - query.sortingColumns().isEmpty() ? query.limit() : -1, ba), + Supplier makeRowSet = isPivot ? + () -> new PivotRowSet(schema, initBucketExtractors(response), mask, response, + query.sortingColumns().isEmpty() ? query.limit() : -1, null) : + () -> new SchemaCompositeAggRowSet(schema, initBucketExtractors(response), mask, response, + query.sortingColumns().isEmpty() ? query.limit() : -1); + + BiFunction makeCursor = isPivot ? + (q, r) -> { + Map lastAfterKey = r instanceof PivotRowSet ? 
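+ // descriptive note: only a pivot row set tracks the last composite key it consumed (needed to resume a partially-built row on the next page); the plain composite cursor resumes from the after-key alone, hence null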
((PivotRowSet) r).lastAfterKey() : null; + return new PivotCursor(lastAfterKey, q, r.extractors(), r.mask(), r.remainingData(), query.shouldIncludeFrozen(), + request.indices()); + } : + (q, r) -> new CompositeAggCursor(q, r.extractors(), r.mask(), r.remainingData(), query.shouldIncludeFrozen(), + request.indices()); + + CompositeAggCursor.handle(response, request.source(), + makeRowSet, + makeCursor, () -> client.search(request, this), - p -> listener.onResponse(p), - e -> listener.onFailure(e), - schema, query.shouldIncludeFrozen(), request.indices()); + listener, + schema); } } @@ -380,6 +403,11 @@ public class Querier { return new TopHitsAggExtractor(r.name(), r.fieldDataType(), cfg.zoneId()); } + if (ref instanceof PivotColumnRef) { + PivotColumnRef r = (PivotColumnRef) ref; + return new PivotExtractor(createExtractor(r.pivot(), totalCount), createExtractor(r.agg(), totalCount), r.value()); + } + if (ref == GlobalCountRef.INSTANCE) { return totalCount; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java similarity index 77% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java index 7eeb8b28f15..eb4d568f557 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java @@ -18,12 +18,12 @@ import java.util.List; * Extension of the {@link RowSet} over a composite agg, extending it to provide its schema. * Used for the initial response. */ -class SchemaCompositeAggsRowSet extends CompositeAggsRowSet implements SchemaRowSet { +class SchemaCompositeAggRowSet extends CompositeAggRowSet implements SchemaRowSet { private final Schema schema; - SchemaCompositeAggsRowSet(Schema schema, List exts, BitSet mask, SearchResponse r, int limitAggs, byte[] next) { - super(exts, mask, r, limitAggs, next); + SchemaCompositeAggRowSet(Schema schema, List exts, BitSet mask, SearchResponse r, int limitAggs) { + super(exts, mask, r, limitAggs); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java new file mode 100644 index 00000000000..ccfe1ad55f2 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.xpack.sql.session.RowSet; +import org.elasticsearch.xpack.sql.session.SchemaRowSet; +import org.elasticsearch.xpack.sql.type.Schema; + +class SchemaDelegatingRowSet implements SchemaRowSet { + + private final Schema schema; + private final RowSet delegate; + + SchemaDelegatingRowSet(Schema schema, RowSet delegate) { + this.schema = schema; + this.delegate = delegate; + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public boolean hasCurrentRow() { + return delegate.hasCurrentRow(); + } + + @Override + public boolean advanceRow() { + return delegate.advanceRow(); + } + + @Override + public int size() { + return delegate.size(); + } + + @Override + public void reset() { + delegate.reset(); + } + + @Override + public Object column(int index) { + return delegate.column(index); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 4e343c1e54f..868dd2dcfff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -76,6 +76,10 @@ public abstract class SourceGenerator { // set page size if (size != null) { int sz = container.limit() > 0 ? Math.min(container.limit(), size) : size; + // now take into account the minimum page size (if set) + // that is, round the size down to the closest non-zero multiple of the minimum page size + // e.g. a size of 18 with a minimum page of 5 becomes 15, three whole pages + int minSize = container.minPageSize(); + sz = minSize > 0 ? (Math.max(sz / minSize, 1) * minSize) : sz; if (source.size() == -1) { source.size(sz); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java index 221662b79c1..bcbbce8e457 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java @@ -26,6 +26,7 @@ public final class BucketExtractors { entries.add(new Entry(BucketExtractor.class, MetricAggExtractor.NAME, MetricAggExtractor::new)); entries.add(new Entry(BucketExtractor.class, TopHitsAggExtractor.NAME, TopHitsAggExtractor::new)); entries.add(new Entry(BucketExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new)); + entries.add(new Entry(BucketExtractor.class, PivotExtractor.NAME, PivotExtractor::new)); return entries; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java new file mode 100644 index 00000000000..e7c1b8dfa30 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ + +package org.elasticsearch.xpack.sql.execution.search.extractor; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; + +import java.io.IOException; +import java.util.Objects; + +public class PivotExtractor implements BucketExtractor { + + static final String NAME = "pv"; + + private final BucketExtractor groupExtractor; + private final BucketExtractor metricExtractor; + private final Object value; + + public PivotExtractor(BucketExtractor groupExtractor, BucketExtractor metricExtractor, Object value) { + this.groupExtractor = groupExtractor; + this.metricExtractor = metricExtractor; + this.value = value; + } + + PivotExtractor(StreamInput in) throws IOException { + groupExtractor = in.readNamedWriteable(BucketExtractor.class); + metricExtractor = in.readNamedWriteable(BucketExtractor.class); + value = in.readGenericValue(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteable(groupExtractor); + out.writeNamedWriteable(metricExtractor); + out.writeGenericValue(value); + } + + @Override + public Object extract(Bucket bucket) { + if (Objects.equals(value, groupExtractor.extract(bucket))) { + return metricExtractor.extract(bucket); + } + return null; + } + + @Override + public int hashCode() { + return Objects.hash(groupExtractor, metricExtractor, value); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + PivotExtractor other = (PivotExtractor) obj; + return Objects.equals(groupExtractor, other.groupExtractor) + && Objects.equals(metricExtractor, other.metricExtractor) + && Objects.equals(value, other.value); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java index f4c8526bf47..4ebc030c281 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java @@ -108,7 +108,7 @@ public class Alias extends NamedExpression { Attribute attr = Expressions.attribute(c); if (attr != null) { - return attr.clone(source(), name(), qualifier, child.nullable(), id(), synthetic()); + return attr.clone(source(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic()); } else { // TODO: WE need to fix this fake Field diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java index 2f8b6633249..9f6b54badaf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java @@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; import java.util.Objects; @@ -87,19 +88,33 @@ public abstract class Attribute extends NamedExpression { } public Attribute 
withLocation(Source source) { - return Objects.equals(source(), source) ? this : clone(source, name(), qualifier(), nullable(), id(), synthetic()); + return Objects.equals(source(), source) ? this : clone(source, name(), dataType(), qualifier(), nullable(), id(), synthetic()); } public Attribute withQualifier(String qualifier) { - return Objects.equals(qualifier(), qualifier) ? this : clone(source(), name(), qualifier, nullable(), id(), synthetic()); + return Objects.equals(qualifier(), qualifier) ? this : clone(source(), name(), dataType(), qualifier, nullable(), id(), + synthetic()); + } + + public Attribute withName(String name) { + return Objects.equals(name(), name) ? this : clone(source(), name, dataType(), qualifier(), nullable(), id(), synthetic()); } public Attribute withNullability(Nullability nullability) { - return Objects.equals(nullable(), nullability) ? this : clone(source(), name(), qualifier(), nullability, id(), synthetic()); + return Objects.equals(nullable(), nullability) ? this : clone(source(), name(), dataType(), qualifier(), nullability, id(), + synthetic()); } - protected abstract Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, - boolean synthetic); + public Attribute withDataType(DataType type) { + return Objects.equals(dataType(), type) ? this : clone(source(), name(), type, qualifier(), nullable(), id(), synthetic()); + } + + public Attribute withId(ExpressionId id) { + return clone(source(), name(), dataType(), qualifier(), nullable(), id, synthetic()); + } + + protected abstract Attribute clone(Source source, String name, DataType type, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic); @Override public Attribute toAttribute() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java index 55f947a20ac..cbc622a615c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java @@ -25,6 +25,10 @@ public class ExpressionId { this.id = COUNTER.incrementAndGet(); } + public ExpressionId(long id) { + this.id = id; + } + @Override public int hashCode() { return Objects.hash(id); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index ca5e4b75756..0515d4f11b4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import java.util.ArrayList; import java.util.Collection; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static java.util.Collections.emptyList; @@ -134,6 +137,30 @@ public final class Expressions { return true; } + public static List onlyPrimitiveFieldAttributes(Collection attributes) { + List filtered = new ArrayList<>(); + // add only primitives + // but filter out 
multi fields (allow only the top-level value) + Set seenMultiFields = new LinkedHashSet<>(); + + for (Attribute a : attributes) { + if (!DataTypes.isUnsupported(a.dataType()) && a.dataType().isPrimitive()) { + if (a instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) a; + // skip nested fields and seen multi-fields + if (!fa.isNested() && !seenMultiFields.contains(fa.parent())) { + filtered.add(a); + seenMultiFields.add(a); + } + } else { + filtered.add(a); + } + } + } + + return filtered; + } + public static Pipe pipe(Expression e) { if (e instanceof NamedExpression) { return ((NamedExpression) e).asPipe(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index cb86e2742b2..c0cd9a95eb6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -36,10 +36,15 @@ public class FieldAttribute extends TypedAttribute { public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { this(source, parent, name, field, null, Nullability.TRUE, null, false); } + + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { + this(source, parent, name, field.getDataType(), field, qualifier, nullability, id, synthetic); + } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, String qualifier, + public FieldAttribute(Source source, FieldAttribute parent, String name, DataType type, EsField field, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - super(source, name, field.getDataType(), qualifier, nullability, id, synthetic); + super(source, name, type, qualifier, nullability, id, synthetic); this.path = parent != null ? parent.name() : StringUtils.EMPTY; this.parent = parent; this.field = field; @@ -57,7 +62,7 @@ public class FieldAttribute extends TypedAttribute { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldAttribute::new, parent, name(), field, qualifier(), nullable(), id(), synthetic()); + return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, qualifier(), nullable(), id(), synthetic()); } public FieldAttribute parent() { @@ -103,8 +108,8 @@ public class FieldAttribute extends TypedAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, - ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, DataType type, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic) { FieldAttribute qualifiedParent = parent != null ? 
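// descriptive note: the parent is requalified first so a sub-field and its parent keep the same qualifier across the clone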
(FieldAttribute) parent.withQualifier(qualifier) : null; return new FieldAttribute(source, qualifiedParent, name, field, qualifier, nullability, id, synthetic); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java index b4ccd7eb9ff..b22483bda36 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java @@ -77,7 +77,7 @@ public class Literal extends NamedExpression { @Override public Attribute toAttribute() { - return new LiteralAttribute(source(), name(), null, nullable(), id(), false, dataType, this); + return new LiteralAttribute(source(), name(), dataType, null, nullable(), id(), false, this); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java index 1305240b609..506f3f8a073 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java @@ -14,8 +14,8 @@ public class LiteralAttribute extends TypedAttribute { private final Literal literal; - public LiteralAttribute(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic, - DataType dataType, Literal literal) { + public LiteralAttribute(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, + boolean synthetic, Literal literal) { super(source, name, dataType, qualifier, nullability, id, synthetic); this.literal = literal; } @@ -23,13 +23,13 @@ public class LiteralAttribute extends TypedAttribute { @Override protected NodeInfo info() { return NodeInfo.create(this, LiteralAttribute::new, - name(), qualifier(), nullable(), id(), synthetic(), dataType(), literal); + name(), dataType(), qualifier(), nullable(), id(), synthetic(), literal); } @Override - protected LiteralAttribute clone(Source source, String name, String qualifier, Nullability nullability, + protected LiteralAttribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new LiteralAttribute(source, name, qualifier, nullability, id, synthetic, dataType(), literal); + return new LiteralAttribute(source, name, dataType, qualifier, nullability, id, synthetic, literal); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java index 476c69fea09..add7f702e04 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java @@ -65,7 +65,7 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { return this; } diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java index bcd0aab16c6..7d93db3d862 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java @@ -41,9 +41,9 @@ public class ScoreAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new ScoreAttribute(source, name, dataType(), qualifier, nullability, id, synthetic); + return new ScoreAttribute(source, name, dataType, qualifier, nullability, id, synthetic); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java index 177f598dc9a..59b4f345a4a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java @@ -28,7 +28,7 @@ import static java.util.Collections.singletonList; public abstract class AggregateFunction extends Function { private final Expression field; - private final List parameters; + private final List parameters; private AggregateFunctionAttribute lazyAttribute; @@ -36,7 +36,7 @@ public abstract class AggregateFunction extends Function { this(source, field, emptyList()); } - protected AggregateFunction(Source source, Expression field, List parameters) { + protected AggregateFunction(Source source, Expression field, List parameters) { super(source, CollectionUtils.combine(singletonList(field), parameters)); this.field = field; this.parameters = parameters; @@ -46,7 +46,7 @@ public abstract class AggregateFunction extends Function { return field; } - public List parameters() { + public List parameters() { return parameters; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java index 96f072acda5..0bd0c9199bc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java @@ -60,10 +60,11 @@ public class AggregateFunctionAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, + boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new 
AggregateFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId(), innerId, + return new AggregateFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId(), innerId, propertyPath); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java index c33c893141b..2fed4cf3060 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java @@ -37,11 +37,11 @@ public class GroupingFunctionAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new GroupingFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId()); + return new GroupingFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId()); } public GroupingFunctionAttribute withFunctionId(String functionId, String propertyPath) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java index 6a0980c2690..67324ba466c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java @@ -66,9 +66,9 @@ public class ScalarFunctionAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new ScalarFunctionAttribute(source, name, dataType(), qualifier, nullability, + return new ScalarFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId(), script, orderBy, pipe); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index 6689a33b162..e702c4ecdbb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.Order; +import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import 
org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.Functions; @@ -72,6 +73,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnaryPlan; @@ -96,6 +98,7 @@ import java.util.Map.Entry; import java.util.Set; import java.util.function.Consumer; +import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Expressions.equalsAsAttribute; import static org.elasticsearch.xpack.sql.expression.Literal.FALSE; import static org.elasticsearch.xpack.sql.expression.Literal.TRUE; @@ -120,6 +123,9 @@ public class Optimizer extends RuleExecutor<LogicalPlan> { @Override protected Iterable<RuleExecutor<LogicalPlan>.Batch> batches() { + Batch pivot = new Batch("Pivot Rewrite", Limiter.ONCE, + new RewritePivot()); + Batch operators = new Batch("Operator Optimization", new PruneDuplicatesInGroupBy(), // combining @@ -170,9 +176,40 @@ public class Optimizer extends RuleExecutor<LogicalPlan> { CleanAliases.INSTANCE, new SetAsOptimized()); - return Arrays.asList(operators, aggregate, local, label); + return Arrays.asList(pivot, operators, aggregate, local, label); } + static class RewritePivot extends OptimizerRule<Pivot> { + + @Override + protected LogicalPlan rule(Pivot plan) { + // 1. add the IN filter + List<Expression> rawValues = new ArrayList<>(plan.values().size()); + for (NamedExpression namedExpression : plan.values()) { + // everything should have resolved to an alias + if (namedExpression instanceof Alias) { + rawValues.add(((Alias) namedExpression).child()); + } + // TODO: this should be removed when refactoring NamedExpression + else if (namedExpression instanceof Literal) { + rawValues.add(namedExpression); + } + // TODO: NamedExpression refactoring should remove this + else if (namedExpression.foldable()) { + rawValues.add(Literal.of(namedExpression.name(), namedExpression)); + } + // TODO: same as above + else { + UnresolvedAttribute attr = new UnresolvedAttribute(namedExpression.source(), namedExpression.name(), null, + "Unexpected alias"); + return new Pivot(plan.source(), plan.child(), plan.column(), singletonList(attr), plan.aggregates()); + } + } + Filter filter = new Filter(plan.source(), plan.child(), new In(plan.source(), plan.column(), rawValues)); + // 2.
preserve the PIVOT + return new Pivot(plan.source(), filter, plan.column(), plan.values(), plan.aggregates()); + } + } static class PruneDuplicatesInGroupBy extends OptimizerRule<Aggregate> { @@ -1038,7 +1075,14 @@ public class Optimizer extends RuleExecutor<LogicalPlan> { Aggregate a = (Aggregate) child; return new Aggregate(a.source(), a.child(), a.groupings(), combineProjections(project.projections(), a.aggregates())); } - + // if the pivot custom columns are not used, convert the project + pivot into a GROUP BY/Aggregate + if (child instanceof Pivot) { + Pivot p = (Pivot) child; + if (project.outputSet().subsetOf(p.groupingSet())) { + return new Aggregate(p.source(), p.child(), new ArrayList<>(project.projections()), project.projections()); + } + } + // TODO: add rule for combining Agg/Pivot with underlying project return project; } @@ -1172,7 +1216,7 @@ return Literal.of(in, null); } - } else if (e instanceof Alias == false + } else if (e instanceof Alias == false && e.nullable() == Nullability.TRUE && Expressions.anyMatch(e.children(), Expressions::isNull)) { return Literal.of(e, null); @@ -1976,7 +2020,8 @@ } } else if (n.foldable()) { values.add(n.fold()); - } else { + } + else { // not everything is foldable, bail-out early return values; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 429e572878f..9a663994ccf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -8,11 +8,13 @@ package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.TerminalNode; +import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.UnresolvedAlias; +import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedRelationContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FromClauseContext; @@ -22,7 +24,10 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedValueExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PivotArgsContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PivotClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QuerySpecificationContext; @@ -39,20 +44,22 @@ import
org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation; import org.elasticsearch.xpack.sql.plan.logical.With; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.session.SingletonExecutable; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import static java.util.Collections.emptyList; -import static java.util.stream.Collectors.toList; abstract class LogicalPlanBuilder extends ExpressionBuilder { @@ -119,14 +126,8 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { query = new Filter(source(ctx), query, expression(ctx.where)); } - List selectTarget = emptyList(); - - // SELECT a, b, c ... - if (!ctx.selectItem().isEmpty()) { - selectTarget = expressions(ctx.selectItem()).stream() - .map(e -> (e instanceof NamedExpression) ? (NamedExpression) e : new UnresolvedAlias(e.source(), e)) - .collect(toList()); - } + List selectTarget = ctx.selectItems().isEmpty() ? emptyList() : visitList(ctx.selectItems().selectItem(), + NamedExpression.class); // GROUP BY GroupByContext groupByCtx = ctx.groupBy(); @@ -142,7 +143,7 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { query = new Aggregate(source(ctx.GROUP(), endSource), query, groupBy, selectTarget); } else if (!selectTarget.isEmpty()) { - query = new Project(source(ctx.selectItem(0)), query, selectTarget); + query = new Project(source(ctx.selectItems()), query, selectTarget); } // HAVING @@ -160,9 +161,37 @@ public LogicalPlan visitFromClause(FromClauseContext ctx) { // if there are multiple FROM clauses, convert each pair into an inner join List plans = plans(ctx.relation()); - return plans.stream() + LogicalPlan plan = plans.stream() .reduce((left, right) -> new Join(source(ctx), left, right, Join.JoinType.IMPLICIT, null)) .get(); + + // PIVOT + if (ctx.pivotClause() != null) { + PivotClauseContext pivotClause = ctx.pivotClause(); + UnresolvedAttribute column = new UnresolvedAttribute(source(pivotClause.column), visitQualifiedName(pivotClause.column)); + List values = namedValues(pivotClause.aggs); + if (values.size() > 1) { + throw new ParsingException(source(pivotClause.aggs), "PIVOT currently supports only one aggregation, found [{}]", + values.size()); + } + plan = new Pivot(source(pivotClause), plan, column, namedValues(pivotClause.vals), namedValues(pivotClause.aggs)); + } + return plan; + } + + private List namedValues(PivotArgsContext args) { + if (args == null || args.isEmpty()) { + return emptyList(); + } + List values = new ArrayList<>(); + + for (NamedValueExpressionContext value : args.namedValueExpression()) { + Expression exp = expression(value.valueExpression()); + String alias = visitIdentifier(value.identifier()); + Source source = source(value); + values.add(alias != null ?
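+ // descriptive note: an explicit alias wins; otherwise the expression is wrapped in an UnresolvedAlias and named later during resolution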
new Alias(source, alias, exp) : new UnresolvedAlias(source, exp)); + } + return values; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 9e8dd6cd6af..15531de7036 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -311,6 +311,18 @@ class SqlBaseBaseListener implements SqlBaseListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterSelectItems(SqlBaseParser.SelectItemsContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitSelectItems(SqlBaseParser.SelectItemsContext ctx) { } /** * {@inheritDoc} * @@ -407,6 +419,42 @@ class SqlBaseBaseListener implements SqlBaseListener { *
<p>The default implementation does nothing.</p>
*/ @Override public void exitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterPivotClause(SqlBaseParser.PivotClauseContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitPivotClause(SqlBaseParser.PivotClauseContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterPivotArgs(SqlBaseParser.PivotArgsContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitPivotArgs(SqlBaseParser.PivotArgsContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void enterNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
+ */ + @Override public void exitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 199fb407698..dc05e66c1e7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -186,6 +186,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
*/ @Override public T visitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitSelectItems(SqlBaseParser.SelectItemsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -242,6 +249,27 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
*/ @Override public T visitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitPivotClause(SqlBaseParser.PivotClauseContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitPivotArgs(SqlBaseParser.PivotArgsContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index de8afac1526..cba3c1ee9a3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -22,21 +22,22 @@ class SqlBaseLexer extends Lexer { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, - GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, - IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, - MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, - NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, - PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, - SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, - TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, - YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, - TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, - NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, - SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, - INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, - TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, BACKQUOTED_IDENTIFIER=131, - SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135; + FIRST=37, FOR=38, FORMAT=39, FROM=40, FROZEN=41, FULL=42, FUNCTIONS=43, + GRAPHVIZ=44, GROUP=45, HAVING=46, HOUR=47, HOURS=48, IN=49, INCLUDE=50, + INNER=51, INTERVAL=52, IS=53, JOIN=54, LAST=55, LEFT=56, LIKE=57, LIMIT=58, + MAPPED=59, MATCH=60, MINUTE=61, MINUTES=62, MONTH=63, MONTHS=64, NATURAL=65, + NOT=66, NULL=67, NULLS=68, ON=69, OPTIMIZED=70, OR=71, ORDER=72, OUTER=73, + PARSED=74, PHYSICAL=75, PIVOT=76, PLAN=77, RIGHT=78, RLIKE=79, QUERY=80, + SCHEMAS=81, SECOND=82, SECONDS=83, SELECT=84, SHOW=85, SYS=86, TABLE=87, + TABLES=88, TEXT=89, THEN=90, TRUE=91, TO=92, TYPE=93, TYPES=94, USING=95, + VERIFY=96, WHEN=97, WHERE=98, WITH=99, YEAR=100, YEARS=101, ESCAPE_ESC=102, + FUNCTION_ESC=103, LIMIT_ESC=104, DATE_ESC=105, TIME_ESC=106, TIMESTAMP_ESC=107, + GUID_ESC=108, ESC_END=109, EQ=110, NULLEQ=111, NEQ=112, LT=113, LTE=114, + GT=115, GTE=116, PLUS=117, MINUS=118, ASTERISK=119, SLASH=120, PERCENT=121, + CAST_OP=122, CONCAT=123, DOT=124, PARAM=125, STRING=126, INTEGER_VALUE=127, + DECIMAL_VALUE=128, IDENTIFIER=129, DIGIT_IDENTIFIER=130, TABLE_IDENTIFIER=131, + QUOTED_IDENTIFIER=132, BACKQUOTED_IDENTIFIER=133, SIMPLE_COMMENT=134, + BRACKETED_COMMENT=135, WS=136, UNRECOGNIZED=137; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -46,21 +47,22 @@ class SqlBaseLexer extends Lexer { "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", 
"ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { @@ -69,40 +71,40 @@ class SqlBaseLexer extends Lexer { "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", + "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", - "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", - "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", - "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", - "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", "'{TS'", "'{GUID'", - "'}'", "'='", 
"'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", - "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", + "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", + "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", + "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", + "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", + "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -159,7 +161,7 @@ class SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u0089\u0471\b\1\4"+ + 
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u008b\u047f\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -175,384 +177,391 @@ class SqlBaseLexer extends Lexer { "\4w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t"+ "\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084\t\u0084"+ "\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087\4\u0088\t\u0088\4\u0089"+ - "\t\u0089\4\u008a\t\u008a\4\u008b\t\u008b\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3"+ - "\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b"+ - "\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3"+ - "\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3"+ - "\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3"+ - "\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3"+ - "\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3"+ - "\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3"+ - "\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3"+ - "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ - "\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3"+ - "\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3"+ - "\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3"+ - "\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3"+ - "!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3"+ - "$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3"+ - "\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3"+ - "+\3+\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3"+ - "-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\3"+ - "\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3"+ - "\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3"+ - "\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\3"+ - "9\39\39\39\39\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3"+ - "<\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3@\3"+ - "@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3C\3C\3C\3C\3C\3"+ - "D\3D\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3H\3"+ - "H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3"+ - "K\3K\3K\3K\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3"+ - "O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3"+ - "R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3U\3U\3U\3U\3V\3V\3V\3"+ - "V\3V\3V\3W\3W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3"+ - "Z\3Z\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]\3]\3^\3^\3^\3^\3^\3^"+ - "\3_\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a\3a\3b\3b\3b\3b\3b"+ - "\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e\3e\3e\3e\3f\3f\3f\3f"+ - "\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3i\3i\3i\3j\3j\3j\3j\3k\3k\3k\3k\3k\3k"+ - "\3l\3l\3m\3m\3n\3n\3n\3n\3o\3o\3o\3o\5o\u03af\no\3p\3p\3q\3q\3q\3r\3r"+ - "\3s\3s\3s\3t\3t\3u\3u\3v\3v\3w\3w\3x\3x\3y\3y\3y\3z\3z\3z\3{\3{\3|\3|"+ - "\3}\3}\3}\3}\7}\u03d3\n}\f}\16}\u03d6\13}\3}\3}\3~\6~\u03db\n~\r~\16~"+ - "\u03dc\3\177\6\177\u03e0\n\177\r\177\16\177\u03e1\3\177\3\177\7\177\u03e6"+ - 
"\n\177\f\177\16\177\u03e9\13\177\3\177\3\177\6\177\u03ed\n\177\r\177\16"+ - "\177\u03ee\3\177\6\177\u03f2\n\177\r\177\16\177\u03f3\3\177\3\177\7\177"+ - "\u03f8\n\177\f\177\16\177\u03fb\13\177\5\177\u03fd\n\177\3\177\3\177\3"+ - "\177\3\177\6\177\u0403\n\177\r\177\16\177\u0404\3\177\3\177\5\177\u0409"+ - "\n\177\3\u0080\3\u0080\5\u0080\u040d\n\u0080\3\u0080\3\u0080\3\u0080\7"+ - "\u0080\u0412\n\u0080\f\u0080\16\u0080\u0415\13\u0080\3\u0081\3\u0081\3"+ - "\u0081\3\u0081\6\u0081\u041b\n\u0081\r\u0081\16\u0081\u041c\3\u0082\3"+ - "\u0082\3\u0082\6\u0082\u0422\n\u0082\r\u0082\16\u0082\u0423\3\u0083\3"+ - "\u0083\3\u0083\3\u0083\7\u0083\u042a\n\u0083\f\u0083\16\u0083\u042d\13"+ - "\u0083\3\u0083\3\u0083\3\u0084\3\u0084\3\u0084\3\u0084\7\u0084\u0435\n"+ - "\u0084\f\u0084\16\u0084\u0438\13\u0084\3\u0084\3\u0084\3\u0085\3\u0085"+ - "\5\u0085\u043e\n\u0085\3\u0085\6\u0085\u0441\n\u0085\r\u0085\16\u0085"+ - "\u0442\3\u0086\3\u0086\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0088"+ - "\7\u0088\u044d\n\u0088\f\u0088\16\u0088\u0450\13\u0088\3\u0088\5\u0088"+ - "\u0453\n\u0088\3\u0088\5\u0088\u0456\n\u0088\3\u0088\3\u0088\3\u0089\3"+ - "\u0089\3\u0089\3\u0089\3\u0089\7\u0089\u045f\n\u0089\f\u0089\16\u0089"+ - "\u0462\13\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u008a\6\u008a"+ - "\u046a\n\u008a\r\u008a\16\u008a\u046b\3\u008a\3\u008a\3\u008b\3\u008b"+ - "\3\u0460\2\u008c\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31"+ - "\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65"+ - "\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64"+ - "g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089"+ - "F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009d"+ - "P\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1"+ - "Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5"+ - "d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9"+ - "n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00ed"+ - "x\u00efy\u00f1z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff"+ - "\u0081\u0101\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\2\u010b\2"+ - "\u010d\2\u010f\u0086\u0111\u0087\u0113\u0088\u0115\u0089\3\2\13\3\2))"+ - "\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17"+ - "\17\"\"\u0491\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+ - "\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+ - "\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+ - "\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+ - "\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+ - "\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+ - "\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+ - "S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3"+ - "\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2"+ - "\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2"+ - "y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083"+ - "\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2"+ - "\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095"+ - "\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2"+ - "\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7"+ - 
"\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2"+ - "\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9"+ - "\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2"+ - "\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb"+ - "\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2"+ - "\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\2\u00db\3\2\2\2\2\u00dd"+ - "\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2"+ - "\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef"+ - "\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2"+ - "\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2\2\2\u0101"+ - "\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u010f\3\2\2"+ - "\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\3\u0117\3\2\2\2\5\u0119"+ - "\3\2\2\2\7\u011b\3\2\2\2\t\u011d\3\2\2\2\13\u011f\3\2\2\2\r\u0123\3\2"+ - "\2\2\17\u012b\3\2\2\2\21\u0134\3\2\2\2\23\u0138\3\2\2\2\25\u013c\3\2\2"+ - "\2\27\u013f\3\2\2\2\31\u0143\3\2\2\2\33\u014b\3\2\2\2\35\u014e\3\2\2\2"+ - "\37\u0153\3\2\2\2!\u0158\3\2\2\2#\u0160\3\2\2\2%\u0169\3\2\2\2\'\u0171"+ - "\3\2\2\2)\u0179\3\2\2\2+\u0186\3\2\2\2-\u0193\3\2\2\2/\u01a5\3\2\2\2\61"+ - "\u01a9\3\2\2\2\63\u01ae\3\2\2\2\65\u01b4\3\2\2\2\67\u01b9\3\2\2\29\u01c2"+ - "\3\2\2\2;\u01cb\3\2\2\2=\u01d0\3\2\2\2?\u01d4\3\2\2\2A\u01db\3\2\2\2C"+ - "\u01e6\3\2\2\2E\u01ed\3\2\2\2G\u01f5\3\2\2\2I\u01fd\3\2\2\2K\u0203\3\2"+ - "\2\2M\u0209\3\2\2\2O\u0210\3\2\2\2Q\u0215\3\2\2\2S\u021c\3\2\2\2U\u0221"+ - "\3\2\2\2W\u022b\3\2\2\2Y\u0234\3\2\2\2[\u023a\3\2\2\2]\u0241\3\2\2\2_"+ - "\u0246\3\2\2\2a\u024c\3\2\2\2c\u024f\3\2\2\2e\u0257\3\2\2\2g\u025d\3\2"+ - "\2\2i\u0266\3\2\2\2k\u0269\3\2\2\2m\u026e\3\2\2\2o\u0273\3\2\2\2q\u0278"+ - "\3\2\2\2s\u027d\3\2\2\2u\u0283\3\2\2\2w\u028a\3\2\2\2y\u0290\3\2\2\2{"+ - "\u0297\3\2\2\2}\u029f\3\2\2\2\177\u02a5\3\2\2\2\u0081\u02ac\3\2\2\2\u0083"+ - "\u02b4\3\2\2\2\u0085\u02b8\3\2\2\2\u0087\u02bd\3\2\2\2\u0089\u02c3\3\2"+ - "\2\2\u008b\u02c6\3\2\2\2\u008d\u02d0\3\2\2\2\u008f\u02d3\3\2\2\2\u0091"+ - "\u02d9\3\2\2\2\u0093\u02df\3\2\2\2\u0095\u02e6\3\2\2\2\u0097\u02ef\3\2"+ - "\2\2\u0099\u02f4\3\2\2\2\u009b\u02fa\3\2\2\2\u009d\u0300\3\2\2\2\u009f"+ - "\u0306\3\2\2\2\u00a1\u030e\3\2\2\2\u00a3\u0315\3\2\2\2\u00a5\u031d\3\2"+ - "\2\2\u00a7\u0324\3\2\2\2\u00a9\u0329\3\2\2\2\u00ab\u032d\3\2\2\2\u00ad"+ - "\u0333\3\2\2\2\u00af\u033a\3\2\2\2\u00b1\u033f\3\2\2\2\u00b3\u0344\3\2"+ - "\2\2\u00b5\u0349\3\2\2\2\u00b7\u034c\3\2\2\2\u00b9\u0351\3\2\2\2\u00bb"+ - "\u0357\3\2\2\2\u00bd\u035d\3\2\2\2\u00bf\u0364\3\2\2\2\u00c1\u0369\3\2"+ - "\2\2\u00c3\u036f\3\2\2\2\u00c5\u0374\3\2\2\2\u00c7\u0379\3\2\2\2\u00c9"+ - "\u037f\3\2\2\2\u00cb\u0387\3\2\2\2\u00cd\u038b\3\2\2\2\u00cf\u0392\3\2"+ - "\2\2\u00d1\u0395\3\2\2\2\u00d3\u0398\3\2\2\2\u00d5\u039c\3\2\2\2\u00d7"+ - "\u03a2\3\2\2\2\u00d9\u03a4\3\2\2\2\u00db\u03a6\3\2\2\2\u00dd\u03ae\3\2"+ - "\2\2\u00df\u03b0\3\2\2\2\u00e1\u03b2\3\2\2\2\u00e3\u03b5\3\2\2\2\u00e5"+ - "\u03b7\3\2\2\2\u00e7\u03ba\3\2\2\2\u00e9\u03bc\3\2\2\2\u00eb\u03be\3\2"+ - "\2\2\u00ed\u03c0\3\2\2\2\u00ef\u03c2\3\2\2\2\u00f1\u03c4\3\2\2\2\u00f3"+ - "\u03c7\3\2\2\2\u00f5\u03ca\3\2\2\2\u00f7\u03cc\3\2\2\2\u00f9\u03ce\3\2"+ - "\2\2\u00fb\u03da\3\2\2\2\u00fd\u0408\3\2\2\2\u00ff\u040c\3\2\2\2\u0101"+ - "\u0416\3\2\2\2\u0103\u0421\3\2\2\2\u0105\u0425\3\2\2\2\u0107\u0430\3\2"+ - "\2\2\u0109\u043b\3\2\2\2\u010b\u0444\3\2\2\2\u010d\u0446\3\2\2\2\u010f"+ - 
"\u0448\3\2\2\2\u0111\u0459\3\2\2\2\u0113\u0469\3\2\2\2\u0115\u046f\3\2"+ - "\2\2\u0117\u0118\7*\2\2\u0118\4\3\2\2\2\u0119\u011a\7+\2\2\u011a\6\3\2"+ - "\2\2\u011b\u011c\7.\2\2\u011c\b\3\2\2\2\u011d\u011e\7<\2\2\u011e\n\3\2"+ - "\2\2\u011f\u0120\7C\2\2\u0120\u0121\7N\2\2\u0121\u0122\7N\2\2\u0122\f"+ - "\3\2\2\2\u0123\u0124\7C\2\2\u0124\u0125\7P\2\2\u0125\u0126\7C\2\2\u0126"+ - "\u0127\7N\2\2\u0127\u0128\7[\2\2\u0128\u0129\7\\\2\2\u0129\u012a\7G\2"+ - "\2\u012a\16\3\2\2\2\u012b\u012c\7C\2\2\u012c\u012d\7P\2\2\u012d\u012e"+ - "\7C\2\2\u012e\u012f\7N\2\2\u012f\u0130\7[\2\2\u0130\u0131\7\\\2\2\u0131"+ - "\u0132\7G\2\2\u0132\u0133\7F\2\2\u0133\20\3\2\2\2\u0134\u0135\7C\2\2\u0135"+ - "\u0136\7P\2\2\u0136\u0137\7F\2\2\u0137\22\3\2\2\2\u0138\u0139\7C\2\2\u0139"+ - "\u013a\7P\2\2\u013a\u013b\7[\2\2\u013b\24\3\2\2\2\u013c\u013d\7C\2\2\u013d"+ - "\u013e\7U\2\2\u013e\26\3\2\2\2\u013f\u0140\7C\2\2\u0140\u0141\7U\2\2\u0141"+ - "\u0142\7E\2\2\u0142\30\3\2\2\2\u0143\u0144\7D\2\2\u0144\u0145\7G\2\2\u0145"+ - "\u0146\7V\2\2\u0146\u0147\7Y\2\2\u0147\u0148\7G\2\2\u0148\u0149\7G\2\2"+ - "\u0149\u014a\7P\2\2\u014a\32\3\2\2\2\u014b\u014c\7D\2\2\u014c\u014d\7"+ - "[\2\2\u014d\34\3\2\2\2\u014e\u014f\7E\2\2\u014f\u0150\7C\2\2\u0150\u0151"+ - "\7U\2\2\u0151\u0152\7G\2\2\u0152\36\3\2\2\2\u0153\u0154\7E\2\2\u0154\u0155"+ - "\7C\2\2\u0155\u0156\7U\2\2\u0156\u0157\7V\2\2\u0157 \3\2\2\2\u0158\u0159"+ - "\7E\2\2\u0159\u015a\7C\2\2\u015a\u015b\7V\2\2\u015b\u015c\7C\2\2\u015c"+ - "\u015d\7N\2\2\u015d\u015e\7Q\2\2\u015e\u015f\7I\2\2\u015f\"\3\2\2\2\u0160"+ - "\u0161\7E\2\2\u0161\u0162\7C\2\2\u0162\u0163\7V\2\2\u0163\u0164\7C\2\2"+ - "\u0164\u0165\7N\2\2\u0165\u0166\7Q\2\2\u0166\u0167\7I\2\2\u0167\u0168"+ - "\7U\2\2\u0168$\3\2\2\2\u0169\u016a\7E\2\2\u016a\u016b\7Q\2\2\u016b\u016c"+ - "\7N\2\2\u016c\u016d\7W\2\2\u016d\u016e\7O\2\2\u016e\u016f\7P\2\2\u016f"+ - "\u0170\7U\2\2\u0170&\3\2\2\2\u0171\u0172\7E\2\2\u0172\u0173\7Q\2\2\u0173"+ - "\u0174\7P\2\2\u0174\u0175\7X\2\2\u0175\u0176\7G\2\2\u0176\u0177\7T\2\2"+ - "\u0177\u0178\7V\2\2\u0178(\3\2\2\2\u0179\u017a\7E\2\2\u017a\u017b\7W\2"+ - "\2\u017b\u017c\7T\2\2\u017c\u017d\7T\2\2\u017d\u017e\7G\2\2\u017e\u017f"+ - "\7P\2\2\u017f\u0180\7V\2\2\u0180\u0181\7a\2\2\u0181\u0182\7F\2\2\u0182"+ - "\u0183\7C\2\2\u0183\u0184\7V\2\2\u0184\u0185\7G\2\2\u0185*\3\2\2\2\u0186"+ - "\u0187\7E\2\2\u0187\u0188\7W\2\2\u0188\u0189\7T\2\2\u0189\u018a\7T\2\2"+ - "\u018a\u018b\7G\2\2\u018b\u018c\7P\2\2\u018c\u018d\7V\2\2\u018d\u018e"+ - "\7a\2\2\u018e\u018f\7V\2\2\u018f\u0190\7K\2\2\u0190\u0191\7O\2\2\u0191"+ - "\u0192\7G\2\2\u0192,\3\2\2\2\u0193\u0194\7E\2\2\u0194\u0195\7W\2\2\u0195"+ - "\u0196\7T\2\2\u0196\u0197\7T\2\2\u0197\u0198\7G\2\2\u0198\u0199\7P\2\2"+ - "\u0199\u019a\7V\2\2\u019a\u019b\7a\2\2\u019b\u019c\7V\2\2\u019c\u019d"+ - "\7K\2\2\u019d\u019e\7O\2\2\u019e\u019f\7G\2\2\u019f\u01a0\7U\2\2\u01a0"+ - "\u01a1\7V\2\2\u01a1\u01a2\7C\2\2\u01a2\u01a3\7O\2\2\u01a3\u01a4\7R\2\2"+ - "\u01a4.\3\2\2\2\u01a5\u01a6\7F\2\2\u01a6\u01a7\7C\2\2\u01a7\u01a8\7[\2"+ - "\2\u01a8\60\3\2\2\2\u01a9\u01aa\7F\2\2\u01aa\u01ab\7C\2\2\u01ab\u01ac"+ - "\7[\2\2\u01ac\u01ad\7U\2\2\u01ad\62\3\2\2\2\u01ae\u01af\7F\2\2\u01af\u01b0"+ - "\7G\2\2\u01b0\u01b1\7D\2\2\u01b1\u01b2\7W\2\2\u01b2\u01b3\7I\2\2\u01b3"+ - "\64\3\2\2\2\u01b4\u01b5\7F\2\2\u01b5\u01b6\7G\2\2\u01b6\u01b7\7U\2\2\u01b7"+ - "\u01b8\7E\2\2\u01b8\66\3\2\2\2\u01b9\u01ba\7F\2\2\u01ba\u01bb\7G\2\2\u01bb"+ - "\u01bc\7U\2\2\u01bc\u01bd\7E\2\2\u01bd\u01be\7T\2\2\u01be\u01bf\7K\2\2"+ - "\u01bf\u01c0\7D\2\2\u01c0\u01c1\7G\2\2\u01c18\3\2\2\2\u01c2\u01c3\7F\2"+ - 
"\2\u01c3\u01c4\7K\2\2\u01c4\u01c5\7U\2\2\u01c5\u01c6\7V\2\2\u01c6\u01c7"+ - "\7K\2\2\u01c7\u01c8\7P\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7V\2\2\u01ca"+ - ":\3\2\2\2\u01cb\u01cc\7G\2\2\u01cc\u01cd\7N\2\2\u01cd\u01ce\7U\2\2\u01ce"+ - "\u01cf\7G\2\2\u01cf<\3\2\2\2\u01d0\u01d1\7G\2\2\u01d1\u01d2\7P\2\2\u01d2"+ - "\u01d3\7F\2\2\u01d3>\3\2\2\2\u01d4\u01d5\7G\2\2\u01d5\u01d6\7U\2\2\u01d6"+ - "\u01d7\7E\2\2\u01d7\u01d8\7C\2\2\u01d8\u01d9\7R\2\2\u01d9\u01da\7G\2\2"+ - "\u01da@\3\2\2\2\u01db\u01dc\7G\2\2\u01dc\u01dd\7Z\2\2\u01dd\u01de\7G\2"+ - "\2\u01de\u01df\7E\2\2\u01df\u01e0\7W\2\2\u01e0\u01e1\7V\2\2\u01e1\u01e2"+ - "\7C\2\2\u01e2\u01e3\7D\2\2\u01e3\u01e4\7N\2\2\u01e4\u01e5\7G\2\2\u01e5"+ - "B\3\2\2\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7Z\2\2\u01e8\u01e9\7K\2\2\u01e9"+ - "\u01ea\7U\2\2\u01ea\u01eb\7V\2\2\u01eb\u01ec\7U\2\2\u01ecD\3\2\2\2\u01ed"+ - "\u01ee\7G\2\2\u01ee\u01ef\7Z\2\2\u01ef\u01f0\7R\2\2\u01f0\u01f1\7N\2\2"+ - "\u01f1\u01f2\7C\2\2\u01f2\u01f3\7K\2\2\u01f3\u01f4\7P\2\2\u01f4F\3\2\2"+ - "\2\u01f5\u01f6\7G\2\2\u01f6\u01f7\7Z\2\2\u01f7\u01f8\7V\2\2\u01f8\u01f9"+ - "\7T\2\2\u01f9\u01fa\7C\2\2\u01fa\u01fb\7E\2\2\u01fb\u01fc\7V\2\2\u01fc"+ - "H\3\2\2\2\u01fd\u01fe\7H\2\2\u01fe\u01ff\7C\2\2\u01ff\u0200\7N\2\2\u0200"+ - "\u0201\7U\2\2\u0201\u0202\7G\2\2\u0202J\3\2\2\2\u0203\u0204\7H\2\2\u0204"+ - "\u0205\7K\2\2\u0205\u0206\7T\2\2\u0206\u0207\7U\2\2\u0207\u0208\7V\2\2"+ - "\u0208L\3\2\2\2\u0209\u020a\7H\2\2\u020a\u020b\7Q\2\2\u020b\u020c\7T\2"+ - "\2\u020c\u020d\7O\2\2\u020d\u020e\7C\2\2\u020e\u020f\7V\2\2\u020fN\3\2"+ - "\2\2\u0210\u0211\7H\2\2\u0211\u0212\7T\2\2\u0212\u0213\7Q\2\2\u0213\u0214"+ - "\7O\2\2\u0214P\3\2\2\2\u0215\u0216\7H\2\2\u0216\u0217\7T\2\2\u0217\u0218"+ - "\7Q\2\2\u0218\u0219\7\\\2\2\u0219\u021a\7G\2\2\u021a\u021b\7P\2\2\u021b"+ - "R\3\2\2\2\u021c\u021d\7H\2\2\u021d\u021e\7W\2\2\u021e\u021f\7N\2\2\u021f"+ - "\u0220\7N\2\2\u0220T\3\2\2\2\u0221\u0222\7H\2\2\u0222\u0223\7W\2\2\u0223"+ - "\u0224\7P\2\2\u0224\u0225\7E\2\2\u0225\u0226\7V\2\2\u0226\u0227\7K\2\2"+ - "\u0227\u0228\7Q\2\2\u0228\u0229\7P\2\2\u0229\u022a\7U\2\2\u022aV\3\2\2"+ - "\2\u022b\u022c\7I\2\2\u022c\u022d\7T\2\2\u022d\u022e\7C\2\2\u022e\u022f"+ - "\7R\2\2\u022f\u0230\7J\2\2\u0230\u0231\7X\2\2\u0231\u0232\7K\2\2\u0232"+ - "\u0233\7\\\2\2\u0233X\3\2\2\2\u0234\u0235\7I\2\2\u0235\u0236\7T\2\2\u0236"+ - "\u0237\7Q\2\2\u0237\u0238\7W\2\2\u0238\u0239\7R\2\2\u0239Z\3\2\2\2\u023a"+ - "\u023b\7J\2\2\u023b\u023c\7C\2\2\u023c\u023d\7X\2\2\u023d\u023e\7K\2\2"+ - "\u023e\u023f\7P\2\2\u023f\u0240\7I\2\2\u0240\\\3\2\2\2\u0241\u0242\7J"+ - "\2\2\u0242\u0243\7Q\2\2\u0243\u0244\7W\2\2\u0244\u0245\7T\2\2\u0245^\3"+ - "\2\2\2\u0246\u0247\7J\2\2\u0247\u0248\7Q\2\2\u0248\u0249\7W\2\2\u0249"+ - "\u024a\7T\2\2\u024a\u024b\7U\2\2\u024b`\3\2\2\2\u024c\u024d\7K\2\2\u024d"+ - "\u024e\7P\2\2\u024eb\3\2\2\2\u024f\u0250\7K\2\2\u0250\u0251\7P\2\2\u0251"+ - "\u0252\7E\2\2\u0252\u0253\7N\2\2\u0253\u0254\7W\2\2\u0254\u0255\7F\2\2"+ - "\u0255\u0256\7G\2\2\u0256d\3\2\2\2\u0257\u0258\7K\2\2\u0258\u0259\7P\2"+ - "\2\u0259\u025a\7P\2\2\u025a\u025b\7G\2\2\u025b\u025c\7T\2\2\u025cf\3\2"+ - "\2\2\u025d\u025e\7K\2\2\u025e\u025f\7P\2\2\u025f\u0260\7V\2\2\u0260\u0261"+ - "\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7X\2\2\u0263\u0264\7C\2\2\u0264"+ - "\u0265\7N\2\2\u0265h\3\2\2\2\u0266\u0267\7K\2\2\u0267\u0268\7U\2\2\u0268"+ - "j\3\2\2\2\u0269\u026a\7L\2\2\u026a\u026b\7Q\2\2\u026b\u026c\7K\2\2\u026c"+ - "\u026d\7P\2\2\u026dl\3\2\2\2\u026e\u026f\7N\2\2\u026f\u0270\7C\2\2\u0270"+ - "\u0271\7U\2\2\u0271\u0272\7V\2\2\u0272n\3\2\2\2\u0273\u0274\7N\2\2\u0274"+ - 
"\u0275\7G\2\2\u0275\u0276\7H\2\2\u0276\u0277\7V\2\2\u0277p\3\2\2\2\u0278"+ - "\u0279\7N\2\2\u0279\u027a\7K\2\2\u027a\u027b\7M\2\2\u027b\u027c\7G\2\2"+ - "\u027cr\3\2\2\2\u027d\u027e\7N\2\2\u027e\u027f\7K\2\2\u027f\u0280\7O\2"+ - "\2\u0280\u0281\7K\2\2\u0281\u0282\7V\2\2\u0282t\3\2\2\2\u0283\u0284\7"+ - "O\2\2\u0284\u0285\7C\2\2\u0285\u0286\7R\2\2\u0286\u0287\7R\2\2\u0287\u0288"+ - "\7G\2\2\u0288\u0289\7F\2\2\u0289v\3\2\2\2\u028a\u028b\7O\2\2\u028b\u028c"+ - "\7C\2\2\u028c\u028d\7V\2\2\u028d\u028e\7E\2\2\u028e\u028f\7J\2\2\u028f"+ - "x\3\2\2\2\u0290\u0291\7O\2\2\u0291\u0292\7K\2\2\u0292\u0293\7P\2\2\u0293"+ - "\u0294\7W\2\2\u0294\u0295\7V\2\2\u0295\u0296\7G\2\2\u0296z\3\2\2\2\u0297"+ - "\u0298\7O\2\2\u0298\u0299\7K\2\2\u0299\u029a\7P\2\2\u029a\u029b\7W\2\2"+ - "\u029b\u029c\7V\2\2\u029c\u029d\7G\2\2\u029d\u029e\7U\2\2\u029e|\3\2\2"+ - "\2\u029f\u02a0\7O\2\2\u02a0\u02a1\7Q\2\2\u02a1\u02a2\7P\2\2\u02a2\u02a3"+ - "\7V\2\2\u02a3\u02a4\7J\2\2\u02a4~\3\2\2\2\u02a5\u02a6\7O\2\2\u02a6\u02a7"+ - "\7Q\2\2\u02a7\u02a8\7P\2\2\u02a8\u02a9\7V\2\2\u02a9\u02aa\7J\2\2\u02aa"+ - "\u02ab\7U\2\2\u02ab\u0080\3\2\2\2\u02ac\u02ad\7P\2\2\u02ad\u02ae\7C\2"+ - "\2\u02ae\u02af\7V\2\2\u02af\u02b0\7W\2\2\u02b0\u02b1\7T\2\2\u02b1\u02b2"+ - "\7C\2\2\u02b2\u02b3\7N\2\2\u02b3\u0082\3\2\2\2\u02b4\u02b5\7P\2\2\u02b5"+ - "\u02b6\7Q\2\2\u02b6\u02b7\7V\2\2\u02b7\u0084\3\2\2\2\u02b8\u02b9\7P\2"+ - "\2\u02b9\u02ba\7W\2\2\u02ba\u02bb\7N\2\2\u02bb\u02bc\7N\2\2\u02bc\u0086"+ - "\3\2\2\2\u02bd\u02be\7P\2\2\u02be\u02bf\7W\2\2\u02bf\u02c0\7N\2\2\u02c0"+ - "\u02c1\7N\2\2\u02c1\u02c2\7U\2\2\u02c2\u0088\3\2\2\2\u02c3\u02c4\7Q\2"+ - "\2\u02c4\u02c5\7P\2\2\u02c5\u008a\3\2\2\2\u02c6\u02c7\7Q\2\2\u02c7\u02c8"+ - "\7R\2\2\u02c8\u02c9\7V\2\2\u02c9\u02ca\7K\2\2\u02ca\u02cb\7O\2\2\u02cb"+ - "\u02cc\7K\2\2\u02cc\u02cd\7\\\2\2\u02cd\u02ce\7G\2\2\u02ce\u02cf\7F\2"+ - "\2\u02cf\u008c\3\2\2\2\u02d0\u02d1\7Q\2\2\u02d1\u02d2\7T\2\2\u02d2\u008e"+ - "\3\2\2\2\u02d3\u02d4\7Q\2\2\u02d4\u02d5\7T\2\2\u02d5\u02d6\7F\2\2\u02d6"+ - "\u02d7\7G\2\2\u02d7\u02d8\7T\2\2\u02d8\u0090\3\2\2\2\u02d9\u02da\7Q\2"+ - "\2\u02da\u02db\7W\2\2\u02db\u02dc\7V\2\2\u02dc\u02dd\7G\2\2\u02dd\u02de"+ - "\7T\2\2\u02de\u0092\3\2\2\2\u02df\u02e0\7R\2\2\u02e0\u02e1\7C\2\2\u02e1"+ - "\u02e2\7T\2\2\u02e2\u02e3\7U\2\2\u02e3\u02e4\7G\2\2\u02e4\u02e5\7F\2\2"+ - "\u02e5\u0094\3\2\2\2\u02e6\u02e7\7R\2\2\u02e7\u02e8\7J\2\2\u02e8\u02e9"+ - "\7[\2\2\u02e9\u02ea\7U\2\2\u02ea\u02eb\7K\2\2\u02eb\u02ec\7E\2\2\u02ec"+ - "\u02ed\7C\2\2\u02ed\u02ee\7N\2\2\u02ee\u0096\3\2\2\2\u02ef\u02f0\7R\2"+ - "\2\u02f0\u02f1\7N\2\2\u02f1\u02f2\7C\2\2\u02f2\u02f3\7P\2\2\u02f3\u0098"+ - "\3\2\2\2\u02f4\u02f5\7T\2\2\u02f5\u02f6\7K\2\2\u02f6\u02f7\7I\2\2\u02f7"+ - "\u02f8\7J\2\2\u02f8\u02f9\7V\2\2\u02f9\u009a\3\2\2\2\u02fa\u02fb\7T\2"+ - "\2\u02fb\u02fc\7N\2\2\u02fc\u02fd\7K\2\2\u02fd\u02fe\7M\2\2\u02fe\u02ff"+ - "\7G\2\2\u02ff\u009c\3\2\2\2\u0300\u0301\7S\2\2\u0301\u0302\7W\2\2\u0302"+ - "\u0303\7G\2\2\u0303\u0304\7T\2\2\u0304\u0305\7[\2\2\u0305\u009e\3\2\2"+ - "\2\u0306\u0307\7U\2\2\u0307\u0308\7E\2\2\u0308\u0309\7J\2\2\u0309\u030a"+ - "\7G\2\2\u030a\u030b\7O\2\2\u030b\u030c\7C\2\2\u030c\u030d\7U\2\2\u030d"+ - "\u00a0\3\2\2\2\u030e\u030f\7U\2\2\u030f\u0310\7G\2\2\u0310\u0311\7E\2"+ - "\2\u0311\u0312\7Q\2\2\u0312\u0313\7P\2\2\u0313\u0314\7F\2\2\u0314\u00a2"+ - "\3\2\2\2\u0315\u0316\7U\2\2\u0316\u0317\7G\2\2\u0317\u0318\7E\2\2\u0318"+ - "\u0319\7Q\2\2\u0319\u031a\7P\2\2\u031a\u031b\7F\2\2\u031b\u031c\7U\2\2"+ - "\u031c\u00a4\3\2\2\2\u031d\u031e\7U\2\2\u031e\u031f\7G\2\2\u031f\u0320"+ - 
"\7N\2\2\u0320\u0321\7G\2\2\u0321\u0322\7E\2\2\u0322\u0323\7V\2\2\u0323"+ - "\u00a6\3\2\2\2\u0324\u0325\7U\2\2\u0325\u0326\7J\2\2\u0326\u0327\7Q\2"+ - "\2\u0327\u0328\7Y\2\2\u0328\u00a8\3\2\2\2\u0329\u032a\7U\2\2\u032a\u032b"+ - "\7[\2\2\u032b\u032c\7U\2\2\u032c\u00aa\3\2\2\2\u032d\u032e\7V\2\2\u032e"+ - "\u032f\7C\2\2\u032f\u0330\7D\2\2\u0330\u0331\7N\2\2\u0331\u0332\7G\2\2"+ - "\u0332\u00ac\3\2\2\2\u0333\u0334\7V\2\2\u0334\u0335\7C\2\2\u0335\u0336"+ - "\7D\2\2\u0336\u0337\7N\2\2\u0337\u0338\7G\2\2\u0338\u0339\7U\2\2\u0339"+ - "\u00ae\3\2\2\2\u033a\u033b\7V\2\2\u033b\u033c\7G\2\2\u033c\u033d\7Z\2"+ - "\2\u033d\u033e\7V\2\2\u033e\u00b0\3\2\2\2\u033f\u0340\7V\2\2\u0340\u0341"+ - "\7J\2\2\u0341\u0342\7G\2\2\u0342\u0343\7P\2\2\u0343\u00b2\3\2\2\2\u0344"+ - "\u0345\7V\2\2\u0345\u0346\7T\2\2\u0346\u0347\7W\2\2\u0347\u0348\7G\2\2"+ - "\u0348\u00b4\3\2\2\2\u0349\u034a\7V\2\2\u034a\u034b\7Q\2\2\u034b\u00b6"+ - "\3\2\2\2\u034c\u034d\7V\2\2\u034d\u034e\7[\2\2\u034e\u034f\7R\2\2\u034f"+ - "\u0350\7G\2\2\u0350\u00b8\3\2\2\2\u0351\u0352\7V\2\2\u0352\u0353\7[\2"+ - "\2\u0353\u0354\7R\2\2\u0354\u0355\7G\2\2\u0355\u0356\7U\2\2\u0356\u00ba"+ - "\3\2\2\2\u0357\u0358\7W\2\2\u0358\u0359\7U\2\2\u0359\u035a\7K\2\2\u035a"+ - "\u035b\7P\2\2\u035b\u035c\7I\2\2\u035c\u00bc\3\2\2\2\u035d\u035e\7X\2"+ - "\2\u035e\u035f\7G\2\2\u035f\u0360\7T\2\2\u0360\u0361\7K\2\2\u0361\u0362"+ - "\7H\2\2\u0362\u0363\7[\2\2\u0363\u00be\3\2\2\2\u0364\u0365\7Y\2\2\u0365"+ - "\u0366\7J\2\2\u0366\u0367\7G\2\2\u0367\u0368\7P\2\2\u0368\u00c0\3\2\2"+ - "\2\u0369\u036a\7Y\2\2\u036a\u036b\7J\2\2\u036b\u036c\7G\2\2\u036c\u036d"+ - "\7T\2\2\u036d\u036e\7G\2\2\u036e\u00c2\3\2\2\2\u036f\u0370\7Y\2\2\u0370"+ - "\u0371\7K\2\2\u0371\u0372\7V\2\2\u0372\u0373\7J\2\2\u0373\u00c4\3\2\2"+ - "\2\u0374\u0375\7[\2\2\u0375\u0376\7G\2\2\u0376\u0377\7C\2\2\u0377\u0378"+ - "\7T\2\2\u0378\u00c6\3\2\2\2\u0379\u037a\7[\2\2\u037a\u037b\7G\2\2\u037b"+ - "\u037c\7C\2\2\u037c\u037d\7T\2\2\u037d\u037e\7U\2\2\u037e\u00c8\3\2\2"+ - "\2\u037f\u0380\7}\2\2\u0380\u0381\7G\2\2\u0381\u0382\7U\2\2\u0382\u0383"+ - "\7E\2\2\u0383\u0384\7C\2\2\u0384\u0385\7R\2\2\u0385\u0386\7G\2\2\u0386"+ - "\u00ca\3\2\2\2\u0387\u0388\7}\2\2\u0388\u0389\7H\2\2\u0389\u038a\7P\2"+ - "\2\u038a\u00cc\3\2\2\2\u038b\u038c\7}\2\2\u038c\u038d\7N\2\2\u038d\u038e"+ - "\7K\2\2\u038e\u038f\7O\2\2\u038f\u0390\7K\2\2\u0390\u0391\7V\2\2\u0391"+ - "\u00ce\3\2\2\2\u0392\u0393\7}\2\2\u0393\u0394\7F\2\2\u0394\u00d0\3\2\2"+ - "\2\u0395\u0396\7}\2\2\u0396\u0397\7V\2\2\u0397\u00d2\3\2\2\2\u0398\u0399"+ - "\7}\2\2\u0399\u039a\7V\2\2\u039a\u039b\7U\2\2\u039b\u00d4\3\2\2\2\u039c"+ - "\u039d\7}\2\2\u039d\u039e\7I\2\2\u039e\u039f\7W\2\2\u039f\u03a0\7K\2\2"+ - "\u03a0\u03a1\7F\2\2\u03a1\u00d6\3\2\2\2\u03a2\u03a3\7\177\2\2\u03a3\u00d8"+ - "\3\2\2\2\u03a4\u03a5\7?\2\2\u03a5\u00da\3\2\2\2\u03a6\u03a7\7>\2\2\u03a7"+ - "\u03a8\7?\2\2\u03a8\u03a9\7@\2\2\u03a9\u00dc\3\2\2\2\u03aa\u03ab\7>\2"+ - "\2\u03ab\u03af\7@\2\2\u03ac\u03ad\7#\2\2\u03ad\u03af\7?\2\2\u03ae\u03aa"+ - "\3\2\2\2\u03ae\u03ac\3\2\2\2\u03af\u00de\3\2\2\2\u03b0\u03b1\7>\2\2\u03b1"+ - "\u00e0\3\2\2\2\u03b2\u03b3\7>\2\2\u03b3\u03b4\7?\2\2\u03b4\u00e2\3\2\2"+ - "\2\u03b5\u03b6\7@\2\2\u03b6\u00e4\3\2\2\2\u03b7\u03b8\7@\2\2\u03b8\u03b9"+ - "\7?\2\2\u03b9\u00e6\3\2\2\2\u03ba\u03bb\7-\2\2\u03bb\u00e8\3\2\2\2\u03bc"+ - "\u03bd\7/\2\2\u03bd\u00ea\3\2\2\2\u03be\u03bf\7,\2\2\u03bf\u00ec\3\2\2"+ - "\2\u03c0\u03c1\7\61\2\2\u03c1\u00ee\3\2\2\2\u03c2\u03c3\7\'\2\2\u03c3"+ - "\u00f0\3\2\2\2\u03c4\u03c5\7<\2\2\u03c5\u03c6\7<\2\2\u03c6\u00f2\3\2\2"+ - 
"\2\u03c7\u03c8\7~\2\2\u03c8\u03c9\7~\2\2\u03c9\u00f4\3\2\2\2\u03ca\u03cb"+ - "\7\60\2\2\u03cb\u00f6\3\2\2\2\u03cc\u03cd\7A\2\2\u03cd\u00f8\3\2\2\2\u03ce"+ - "\u03d4\7)\2\2\u03cf\u03d3\n\2\2\2\u03d0\u03d1\7)\2\2\u03d1\u03d3\7)\2"+ - "\2\u03d2\u03cf\3\2\2\2\u03d2\u03d0\3\2\2\2\u03d3\u03d6\3\2\2\2\u03d4\u03d2"+ - "\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d7\3\2\2\2\u03d6\u03d4\3\2\2\2\u03d7"+ - "\u03d8\7)\2\2\u03d8\u00fa\3\2\2\2\u03d9\u03db\5\u010b\u0086\2\u03da\u03d9"+ - "\3\2\2\2\u03db\u03dc\3\2\2\2\u03dc\u03da\3\2\2\2\u03dc\u03dd\3\2\2\2\u03dd"+ - "\u00fc\3\2\2\2\u03de\u03e0\5\u010b\u0086\2\u03df\u03de\3\2\2\2\u03e0\u03e1"+ - "\3\2\2\2\u03e1\u03df\3\2\2\2\u03e1\u03e2\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3"+ - "\u03e7\5\u00f5{\2\u03e4\u03e6\5\u010b\u0086\2\u03e5\u03e4\3\2\2\2\u03e6"+ - "\u03e9\3\2\2\2\u03e7\u03e5\3\2\2\2\u03e7\u03e8\3\2\2\2\u03e8\u0409\3\2"+ - "\2\2\u03e9\u03e7\3\2\2\2\u03ea\u03ec\5\u00f5{\2\u03eb\u03ed\5\u010b\u0086"+ - "\2\u03ec\u03eb\3\2\2\2\u03ed\u03ee\3\2\2\2\u03ee\u03ec\3\2\2\2\u03ee\u03ef"+ - "\3\2\2\2\u03ef\u0409\3\2\2\2\u03f0\u03f2\5\u010b\u0086\2\u03f1\u03f0\3"+ - "\2\2\2\u03f2\u03f3\3\2\2\2\u03f3\u03f1\3\2\2\2\u03f3\u03f4\3\2\2\2\u03f4"+ - "\u03fc\3\2\2\2\u03f5\u03f9\5\u00f5{\2\u03f6\u03f8\5\u010b\u0086\2\u03f7"+ - "\u03f6\3\2\2\2\u03f8\u03fb\3\2\2\2\u03f9\u03f7\3\2\2\2\u03f9\u03fa\3\2"+ - "\2\2\u03fa\u03fd\3\2\2\2\u03fb\u03f9\3\2\2\2\u03fc\u03f5\3\2\2\2\u03fc"+ - "\u03fd\3\2\2\2\u03fd\u03fe\3\2\2\2\u03fe\u03ff\5\u0109\u0085\2\u03ff\u0409"+ - "\3\2\2\2\u0400\u0402\5\u00f5{\2\u0401\u0403\5\u010b\u0086\2\u0402\u0401"+ - "\3\2\2\2\u0403\u0404\3\2\2\2\u0404\u0402\3\2\2\2\u0404\u0405\3\2\2\2\u0405"+ - "\u0406\3\2\2\2\u0406\u0407\5\u0109\u0085\2\u0407\u0409\3\2\2\2\u0408\u03df"+ - "\3\2\2\2\u0408\u03ea\3\2\2\2\u0408\u03f1\3\2\2\2\u0408\u0400\3\2\2\2\u0409"+ - "\u00fe\3\2\2\2\u040a\u040d\5\u010d\u0087\2\u040b\u040d\7a\2\2\u040c\u040a"+ - "\3\2\2\2\u040c\u040b\3\2\2\2\u040d\u0413\3\2\2\2\u040e\u0412\5\u010d\u0087"+ - "\2\u040f\u0412\5\u010b\u0086\2\u0410\u0412\t\3\2\2\u0411\u040e\3\2\2\2"+ - "\u0411\u040f\3\2\2\2\u0411\u0410\3\2\2\2\u0412\u0415\3\2\2\2\u0413\u0411"+ - "\3\2\2\2\u0413\u0414\3\2\2\2\u0414\u0100\3\2\2\2\u0415\u0413\3\2\2\2\u0416"+ - "\u041a\5\u010b\u0086\2\u0417\u041b\5\u010d\u0087\2\u0418\u041b\5\u010b"+ - "\u0086\2\u0419\u041b\t\3\2\2\u041a\u0417\3\2\2\2\u041a\u0418\3\2\2\2\u041a"+ - "\u0419\3\2\2\2\u041b\u041c\3\2\2\2\u041c\u041a\3\2\2\2\u041c\u041d\3\2"+ - "\2\2\u041d\u0102\3\2\2\2\u041e\u0422\5\u010d\u0087\2\u041f\u0422\5\u010b"+ - "\u0086\2\u0420\u0422\7a\2\2\u0421\u041e\3\2\2\2\u0421\u041f\3\2\2\2\u0421"+ - "\u0420\3\2\2\2\u0422\u0423\3\2\2\2\u0423\u0421\3\2\2\2\u0423\u0424\3\2"+ - "\2\2\u0424\u0104\3\2\2\2\u0425\u042b\7$\2\2\u0426\u042a\n\4\2\2\u0427"+ - "\u0428\7$\2\2\u0428\u042a\7$\2\2\u0429\u0426\3\2\2\2\u0429\u0427\3\2\2"+ - "\2\u042a\u042d\3\2\2\2\u042b\u0429\3\2\2\2\u042b\u042c\3\2\2\2\u042c\u042e"+ - "\3\2\2\2\u042d\u042b\3\2\2\2\u042e\u042f\7$\2\2\u042f\u0106\3\2\2\2\u0430"+ - "\u0436\7b\2\2\u0431\u0435\n\5\2\2\u0432\u0433\7b\2\2\u0433\u0435\7b\2"+ - "\2\u0434\u0431\3\2\2\2\u0434\u0432\3\2\2\2\u0435\u0438\3\2\2\2\u0436\u0434"+ - "\3\2\2\2\u0436\u0437\3\2\2\2\u0437\u0439\3\2\2\2\u0438\u0436\3\2\2\2\u0439"+ - "\u043a\7b\2\2\u043a\u0108\3\2\2\2\u043b\u043d\7G\2\2\u043c\u043e\t\6\2"+ - "\2\u043d\u043c\3\2\2\2\u043d\u043e\3\2\2\2\u043e\u0440\3\2\2\2\u043f\u0441"+ - "\5\u010b\u0086\2\u0440\u043f\3\2\2\2\u0441\u0442\3\2\2\2\u0442\u0440\3"+ - "\2\2\2\u0442\u0443\3\2\2\2\u0443\u010a\3\2\2\2\u0444\u0445\t\7\2\2\u0445"+ - 
"\u010c\3\2\2\2\u0446\u0447\t\b\2\2\u0447\u010e\3\2\2\2\u0448\u0449\7/"+ - "\2\2\u0449\u044a\7/\2\2\u044a\u044e\3\2\2\2\u044b\u044d\n\t\2\2\u044c"+ - "\u044b\3\2\2\2\u044d\u0450\3\2\2\2\u044e\u044c\3\2\2\2\u044e\u044f\3\2"+ - "\2\2\u044f\u0452\3\2\2\2\u0450\u044e\3\2\2\2\u0451\u0453\7\17\2\2\u0452"+ - "\u0451\3\2\2\2\u0452\u0453\3\2\2\2\u0453\u0455\3\2\2\2\u0454\u0456\7\f"+ - "\2\2\u0455\u0454\3\2\2\2\u0455\u0456\3\2\2\2\u0456\u0457\3\2\2\2\u0457"+ - "\u0458\b\u0088\2\2\u0458\u0110\3\2\2\2\u0459\u045a\7\61\2\2\u045a\u045b"+ - "\7,\2\2\u045b\u0460\3\2\2\2\u045c\u045f\5\u0111\u0089\2\u045d\u045f\13"+ - "\2\2\2\u045e\u045c\3\2\2\2\u045e\u045d\3\2\2\2\u045f\u0462\3\2\2\2\u0460"+ - "\u0461\3\2\2\2\u0460\u045e\3\2\2\2\u0461\u0463\3\2\2\2\u0462\u0460\3\2"+ - "\2\2\u0463\u0464\7,\2\2\u0464\u0465\7\61\2\2\u0465\u0466\3\2\2\2\u0466"+ - "\u0467\b\u0089\2\2\u0467\u0112\3\2\2\2\u0468\u046a\t\n\2\2\u0469\u0468"+ - "\3\2\2\2\u046a\u046b\3\2\2\2\u046b\u0469\3\2\2\2\u046b\u046c\3\2\2\2\u046c"+ - "\u046d\3\2\2\2\u046d\u046e\b\u008a\2\2\u046e\u0114\3\2\2\2\u046f\u0470"+ - "\13\2\2\2\u0470\u0116\3\2\2\2\"\2\u03ae\u03d2\u03d4\u03dc\u03e1\u03e7"+ - "\u03ee\u03f3\u03f9\u03fc\u0404\u0408\u040c\u0411\u0413\u041a\u041c\u0421"+ - "\u0423\u0429\u042b\u0434\u0436\u043d\u0442\u044e\u0452\u0455\u045e\u0460"+ - "\u046b\3\2\3\2"; + "\t\u0089\4\u008a\t\u008a\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d"+ + "\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3"+ + "\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n"+ + "\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3"+ + "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3"+ + "\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3"+ + "\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3"+ + "\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3"+ + "\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ + "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3"+ + "\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3"+ + "\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3"+ + "\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3"+ + " \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3"+ + "\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3"+ + "&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3"+ + "*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3"+ + "-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3\60\3\60"+ + "\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63"+ + "\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65"+ + "\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\3\67"+ + "\3\67\38\38\38\38\38\39\39\39\39\39\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3"+ + "<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3"+ + "?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3"+ + "B\3B\3B\3C\3C\3C\3C\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3G\3G\3"+ + "G\3G\3G\3G\3G\3G\3G\3G\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3"+ + "K\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3"+ + "N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3R\3"+ + 
"R\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3T\3T\3T\3U\3"+ + "U\3U\3U\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3"+ + "Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3]\3]"+ + "\3]\3^\3^\3^\3^\3^\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a"+ + "\3a\3a\3b\3b\3b\3b\3b\3c\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e"+ + "\3f\3f\3f\3f\3f\3f\3g\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3h\3i\3i\3i\3i\3i"+ + "\3i\3i\3j\3j\3j\3k\3k\3k\3l\3l\3l\3l\3m\3m\3m\3m\3m\3m\3n\3n\3o\3o\3p"+ + "\3p\3p\3p\3q\3q\3q\3q\5q\u03bd\nq\3r\3r\3s\3s\3s\3t\3t\3u\3u\3u\3v\3v"+ + "\3w\3w\3x\3x\3y\3y\3z\3z\3{\3{\3{\3|\3|\3|\3}\3}\3~\3~\3\177\3\177\3\177"+ + "\3\177\7\177\u03e1\n\177\f\177\16\177\u03e4\13\177\3\177\3\177\3\u0080"+ + "\6\u0080\u03e9\n\u0080\r\u0080\16\u0080\u03ea\3\u0081\6\u0081\u03ee\n"+ + "\u0081\r\u0081\16\u0081\u03ef\3\u0081\3\u0081\7\u0081\u03f4\n\u0081\f"+ + "\u0081\16\u0081\u03f7\13\u0081\3\u0081\3\u0081\6\u0081\u03fb\n\u0081\r"+ + "\u0081\16\u0081\u03fc\3\u0081\6\u0081\u0400\n\u0081\r\u0081\16\u0081\u0401"+ + "\3\u0081\3\u0081\7\u0081\u0406\n\u0081\f\u0081\16\u0081\u0409\13\u0081"+ + "\5\u0081\u040b\n\u0081\3\u0081\3\u0081\3\u0081\3\u0081\6\u0081\u0411\n"+ + "\u0081\r\u0081\16\u0081\u0412\3\u0081\3\u0081\5\u0081\u0417\n\u0081\3"+ + "\u0082\3\u0082\5\u0082\u041b\n\u0082\3\u0082\3\u0082\3\u0082\7\u0082\u0420"+ + "\n\u0082\f\u0082\16\u0082\u0423\13\u0082\3\u0083\3\u0083\3\u0083\3\u0083"+ + "\6\u0083\u0429\n\u0083\r\u0083\16\u0083\u042a\3\u0084\3\u0084\3\u0084"+ + "\6\u0084\u0430\n\u0084\r\u0084\16\u0084\u0431\3\u0085\3\u0085\3\u0085"+ + "\3\u0085\7\u0085\u0438\n\u0085\f\u0085\16\u0085\u043b\13\u0085\3\u0085"+ + "\3\u0085\3\u0086\3\u0086\3\u0086\3\u0086\7\u0086\u0443\n\u0086\f\u0086"+ + "\16\u0086\u0446\13\u0086\3\u0086\3\u0086\3\u0087\3\u0087\5\u0087\u044c"+ + "\n\u0087\3\u0087\6\u0087\u044f\n\u0087\r\u0087\16\u0087\u0450\3\u0088"+ + "\3\u0088\3\u0089\3\u0089\3\u008a\3\u008a\3\u008a\3\u008a\7\u008a\u045b"+ + "\n\u008a\f\u008a\16\u008a\u045e\13\u008a\3\u008a\5\u008a\u0461\n\u008a"+ + "\3\u008a\5\u008a\u0464\n\u008a\3\u008a\3\u008a\3\u008b\3\u008b\3\u008b"+ + "\3\u008b\3\u008b\7\u008b\u046d\n\u008b\f\u008b\16\u008b\u0470\13\u008b"+ + "\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008c\6\u008c\u0478\n\u008c"+ + "\r\u008c\16\u008c\u0479\3\u008c\3\u008c\3\u008d\3\u008d\3\u046e\2\u008e"+ + "\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20"+ + "\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37"+ + "= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o"+ + "9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH"+ + "\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1"+ + "R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5"+ + "\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9"+ + "f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00dd"+ + "p\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1"+ + "z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff\u0081\u0101"+ + "\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\u0086\u010b\u0087\u010d"+ + "\2\u010f\2\u0111\2\u0113\u0088\u0115\u0089\u0117\u008a\u0119\u008b\3\2"+ + "\13\3\2))\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2"+ + "\13\f\17\17\"\"\u049f\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2"+ + "\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25"+ + 
"\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2"+ + "\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2"+ + "\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3"+ + "\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2"+ + "\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2"+ + "Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3"+ + "\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2"+ + "\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2"+ + "w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2"+ + "\2\2\u0083\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b"+ + "\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2"+ + "\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d"+ + "\3\2\2\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2"+ + "\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af"+ + "\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2"+ + "\2\2\u00b9\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1"+ + "\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2"+ + "\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3"+ + "\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\2\u00db\3\2\2"+ + "\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5"+ + "\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2"+ + "\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7"+ + "\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2"+ + "\2\2\u0101\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u0109"+ + "\3\2\2\2\2\u010b\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\2\u0117\3\2\2"+ + "\2\2\u0119\3\2\2\2\3\u011b\3\2\2\2\5\u011d\3\2\2\2\7\u011f\3\2\2\2\t\u0121"+ + "\3\2\2\2\13\u0123\3\2\2\2\r\u0127\3\2\2\2\17\u012f\3\2\2\2\21\u0138\3"+ + "\2\2\2\23\u013c\3\2\2\2\25\u0140\3\2\2\2\27\u0143\3\2\2\2\31\u0147\3\2"+ + "\2\2\33\u014f\3\2\2\2\35\u0152\3\2\2\2\37\u0157\3\2\2\2!\u015c\3\2\2\2"+ + "#\u0164\3\2\2\2%\u016d\3\2\2\2\'\u0175\3\2\2\2)\u017d\3\2\2\2+\u018a\3"+ + "\2\2\2-\u0197\3\2\2\2/\u01a9\3\2\2\2\61\u01ad\3\2\2\2\63\u01b2\3\2\2\2"+ + "\65\u01b8\3\2\2\2\67\u01bd\3\2\2\29\u01c6\3\2\2\2;\u01cf\3\2\2\2=\u01d4"+ + "\3\2\2\2?\u01d8\3\2\2\2A\u01df\3\2\2\2C\u01ea\3\2\2\2E\u01f1\3\2\2\2G"+ + "\u01f9\3\2\2\2I\u0201\3\2\2\2K\u0207\3\2\2\2M\u020d\3\2\2\2O\u0211\3\2"+ + "\2\2Q\u0218\3\2\2\2S\u021d\3\2\2\2U\u0224\3\2\2\2W\u0229\3\2\2\2Y\u0233"+ + "\3\2\2\2[\u023c\3\2\2\2]\u0242\3\2\2\2_\u0249\3\2\2\2a\u024e\3\2\2\2c"+ + "\u0254\3\2\2\2e\u0257\3\2\2\2g\u025f\3\2\2\2i\u0265\3\2\2\2k\u026e\3\2"+ + "\2\2m\u0271\3\2\2\2o\u0276\3\2\2\2q\u027b\3\2\2\2s\u0280\3\2\2\2u\u0285"+ + "\3\2\2\2w\u028b\3\2\2\2y\u0292\3\2\2\2{\u0298\3\2\2\2}\u029f\3\2\2\2\177"+ + "\u02a7\3\2\2\2\u0081\u02ad\3\2\2\2\u0083\u02b4\3\2\2\2\u0085\u02bc\3\2"+ + "\2\2\u0087\u02c0\3\2\2\2\u0089\u02c5\3\2\2\2\u008b\u02cb\3\2\2\2\u008d"+ + "\u02ce\3\2\2\2\u008f\u02d8\3\2\2\2\u0091\u02db\3\2\2\2\u0093\u02e1\3\2"+ + "\2\2\u0095\u02e7\3\2\2\2\u0097\u02ee\3\2\2\2\u0099\u02f7\3\2\2\2\u009b"+ + "\u02fd\3\2\2\2\u009d\u0302\3\2\2\2\u009f\u0308\3\2\2\2\u00a1\u030e\3\2"+ + "\2\2\u00a3\u0314\3\2\2\2\u00a5\u031c\3\2\2\2\u00a7\u0323\3\2\2\2\u00a9"+ + "\u032b\3\2\2\2\u00ab\u0332\3\2\2\2\u00ad\u0337\3\2\2\2\u00af\u033b\3\2"+ + 
"\2\2\u00b1\u0341\3\2\2\2\u00b3\u0348\3\2\2\2\u00b5\u034d\3\2\2\2\u00b7"+ + "\u0352\3\2\2\2\u00b9\u0357\3\2\2\2\u00bb\u035a\3\2\2\2\u00bd\u035f\3\2"+ + "\2\2\u00bf\u0365\3\2\2\2\u00c1\u036b\3\2\2\2\u00c3\u0372\3\2\2\2\u00c5"+ + "\u0377\3\2\2\2\u00c7\u037d\3\2\2\2\u00c9\u0382\3\2\2\2\u00cb\u0387\3\2"+ + "\2\2\u00cd\u038d\3\2\2\2\u00cf\u0395\3\2\2\2\u00d1\u0399\3\2\2\2\u00d3"+ + "\u03a0\3\2\2\2\u00d5\u03a3\3\2\2\2\u00d7\u03a6\3\2\2\2\u00d9\u03aa\3\2"+ + "\2\2\u00db\u03b0\3\2\2\2\u00dd\u03b2\3\2\2\2\u00df\u03b4\3\2\2\2\u00e1"+ + "\u03bc\3\2\2\2\u00e3\u03be\3\2\2\2\u00e5\u03c0\3\2\2\2\u00e7\u03c3\3\2"+ + "\2\2\u00e9\u03c5\3\2\2\2\u00eb\u03c8\3\2\2\2\u00ed\u03ca\3\2\2\2\u00ef"+ + "\u03cc\3\2\2\2\u00f1\u03ce\3\2\2\2\u00f3\u03d0\3\2\2\2\u00f5\u03d2\3\2"+ + "\2\2\u00f7\u03d5\3\2\2\2\u00f9\u03d8\3\2\2\2\u00fb\u03da\3\2\2\2\u00fd"+ + "\u03dc\3\2\2\2\u00ff\u03e8\3\2\2\2\u0101\u0416\3\2\2\2\u0103\u041a\3\2"+ + "\2\2\u0105\u0424\3\2\2\2\u0107\u042f\3\2\2\2\u0109\u0433\3\2\2\2\u010b"+ + "\u043e\3\2\2\2\u010d\u0449\3\2\2\2\u010f\u0452\3\2\2\2\u0111\u0454\3\2"+ + "\2\2\u0113\u0456\3\2\2\2\u0115\u0467\3\2\2\2\u0117\u0477\3\2\2\2\u0119"+ + "\u047d\3\2\2\2\u011b\u011c\7*\2\2\u011c\4\3\2\2\2\u011d\u011e\7+\2\2\u011e"+ + "\6\3\2\2\2\u011f\u0120\7.\2\2\u0120\b\3\2\2\2\u0121\u0122\7<\2\2\u0122"+ + "\n\3\2\2\2\u0123\u0124\7C\2\2\u0124\u0125\7N\2\2\u0125\u0126\7N\2\2\u0126"+ + "\f\3\2\2\2\u0127\u0128\7C\2\2\u0128\u0129\7P\2\2\u0129\u012a\7C\2\2\u012a"+ + "\u012b\7N\2\2\u012b\u012c\7[\2\2\u012c\u012d\7\\\2\2\u012d\u012e\7G\2"+ + "\2\u012e\16\3\2\2\2\u012f\u0130\7C\2\2\u0130\u0131\7P\2\2\u0131\u0132"+ + "\7C\2\2\u0132\u0133\7N\2\2\u0133\u0134\7[\2\2\u0134\u0135\7\\\2\2\u0135"+ + "\u0136\7G\2\2\u0136\u0137\7F\2\2\u0137\20\3\2\2\2\u0138\u0139\7C\2\2\u0139"+ + "\u013a\7P\2\2\u013a\u013b\7F\2\2\u013b\22\3\2\2\2\u013c\u013d\7C\2\2\u013d"+ + "\u013e\7P\2\2\u013e\u013f\7[\2\2\u013f\24\3\2\2\2\u0140\u0141\7C\2\2\u0141"+ + "\u0142\7U\2\2\u0142\26\3\2\2\2\u0143\u0144\7C\2\2\u0144\u0145\7U\2\2\u0145"+ + "\u0146\7E\2\2\u0146\30\3\2\2\2\u0147\u0148\7D\2\2\u0148\u0149\7G\2\2\u0149"+ + "\u014a\7V\2\2\u014a\u014b\7Y\2\2\u014b\u014c\7G\2\2\u014c\u014d\7G\2\2"+ + "\u014d\u014e\7P\2\2\u014e\32\3\2\2\2\u014f\u0150\7D\2\2\u0150\u0151\7"+ + "[\2\2\u0151\34\3\2\2\2\u0152\u0153\7E\2\2\u0153\u0154\7C\2\2\u0154\u0155"+ + "\7U\2\2\u0155\u0156\7G\2\2\u0156\36\3\2\2\2\u0157\u0158\7E\2\2\u0158\u0159"+ + "\7C\2\2\u0159\u015a\7U\2\2\u015a\u015b\7V\2\2\u015b \3\2\2\2\u015c\u015d"+ + "\7E\2\2\u015d\u015e\7C\2\2\u015e\u015f\7V\2\2\u015f\u0160\7C\2\2\u0160"+ + "\u0161\7N\2\2\u0161\u0162\7Q\2\2\u0162\u0163\7I\2\2\u0163\"\3\2\2\2\u0164"+ + "\u0165\7E\2\2\u0165\u0166\7C\2\2\u0166\u0167\7V\2\2\u0167\u0168\7C\2\2"+ + "\u0168\u0169\7N\2\2\u0169\u016a\7Q\2\2\u016a\u016b\7I\2\2\u016b\u016c"+ + "\7U\2\2\u016c$\3\2\2\2\u016d\u016e\7E\2\2\u016e\u016f\7Q\2\2\u016f\u0170"+ + "\7N\2\2\u0170\u0171\7W\2\2\u0171\u0172\7O\2\2\u0172\u0173\7P\2\2\u0173"+ + "\u0174\7U\2\2\u0174&\3\2\2\2\u0175\u0176\7E\2\2\u0176\u0177\7Q\2\2\u0177"+ + "\u0178\7P\2\2\u0178\u0179\7X\2\2\u0179\u017a\7G\2\2\u017a\u017b\7T\2\2"+ + "\u017b\u017c\7V\2\2\u017c(\3\2\2\2\u017d\u017e\7E\2\2\u017e\u017f\7W\2"+ + "\2\u017f\u0180\7T\2\2\u0180\u0181\7T\2\2\u0181\u0182\7G\2\2\u0182\u0183"+ + "\7P\2\2\u0183\u0184\7V\2\2\u0184\u0185\7a\2\2\u0185\u0186\7F\2\2\u0186"+ + "\u0187\7C\2\2\u0187\u0188\7V\2\2\u0188\u0189\7G\2\2\u0189*\3\2\2\2\u018a"+ + "\u018b\7E\2\2\u018b\u018c\7W\2\2\u018c\u018d\7T\2\2\u018d\u018e\7T\2\2"+ + "\u018e\u018f\7G\2\2\u018f\u0190\7P\2\2\u0190\u0191\7V\2\2\u0191\u0192"+ + 
"\7a\2\2\u0192\u0193\7V\2\2\u0193\u0194\7K\2\2\u0194\u0195\7O\2\2\u0195"+ + "\u0196\7G\2\2\u0196,\3\2\2\2\u0197\u0198\7E\2\2\u0198\u0199\7W\2\2\u0199"+ + "\u019a\7T\2\2\u019a\u019b\7T\2\2\u019b\u019c\7G\2\2\u019c\u019d\7P\2\2"+ + "\u019d\u019e\7V\2\2\u019e\u019f\7a\2\2\u019f\u01a0\7V\2\2\u01a0\u01a1"+ + "\7K\2\2\u01a1\u01a2\7O\2\2\u01a2\u01a3\7G\2\2\u01a3\u01a4\7U\2\2\u01a4"+ + "\u01a5\7V\2\2\u01a5\u01a6\7C\2\2\u01a6\u01a7\7O\2\2\u01a7\u01a8\7R\2\2"+ + "\u01a8.\3\2\2\2\u01a9\u01aa\7F\2\2\u01aa\u01ab\7C\2\2\u01ab\u01ac\7[\2"+ + "\2\u01ac\60\3\2\2\2\u01ad\u01ae\7F\2\2\u01ae\u01af\7C\2\2\u01af\u01b0"+ + "\7[\2\2\u01b0\u01b1\7U\2\2\u01b1\62\3\2\2\2\u01b2\u01b3\7F\2\2\u01b3\u01b4"+ + "\7G\2\2\u01b4\u01b5\7D\2\2\u01b5\u01b6\7W\2\2\u01b6\u01b7\7I\2\2\u01b7"+ + "\64\3\2\2\2\u01b8\u01b9\7F\2\2\u01b9\u01ba\7G\2\2\u01ba\u01bb\7U\2\2\u01bb"+ + "\u01bc\7E\2\2\u01bc\66\3\2\2\2\u01bd\u01be\7F\2\2\u01be\u01bf\7G\2\2\u01bf"+ + "\u01c0\7U\2\2\u01c0\u01c1\7E\2\2\u01c1\u01c2\7T\2\2\u01c2\u01c3\7K\2\2"+ + "\u01c3\u01c4\7D\2\2\u01c4\u01c5\7G\2\2\u01c58\3\2\2\2\u01c6\u01c7\7F\2"+ + "\2\u01c7\u01c8\7K\2\2\u01c8\u01c9\7U\2\2\u01c9\u01ca\7V\2\2\u01ca\u01cb"+ + "\7K\2\2\u01cb\u01cc\7P\2\2\u01cc\u01cd\7E\2\2\u01cd\u01ce\7V\2\2\u01ce"+ + ":\3\2\2\2\u01cf\u01d0\7G\2\2\u01d0\u01d1\7N\2\2\u01d1\u01d2\7U\2\2\u01d2"+ + "\u01d3\7G\2\2\u01d3<\3\2\2\2\u01d4\u01d5\7G\2\2\u01d5\u01d6\7P\2\2\u01d6"+ + "\u01d7\7F\2\2\u01d7>\3\2\2\2\u01d8\u01d9\7G\2\2\u01d9\u01da\7U\2\2\u01da"+ + "\u01db\7E\2\2\u01db\u01dc\7C\2\2\u01dc\u01dd\7R\2\2\u01dd\u01de\7G\2\2"+ + "\u01de@\3\2\2\2\u01df\u01e0\7G\2\2\u01e0\u01e1\7Z\2\2\u01e1\u01e2\7G\2"+ + "\2\u01e2\u01e3\7E\2\2\u01e3\u01e4\7W\2\2\u01e4\u01e5\7V\2\2\u01e5\u01e6"+ + "\7C\2\2\u01e6\u01e7\7D\2\2\u01e7\u01e8\7N\2\2\u01e8\u01e9\7G\2\2\u01e9"+ + "B\3\2\2\2\u01ea\u01eb\7G\2\2\u01eb\u01ec\7Z\2\2\u01ec\u01ed\7K\2\2\u01ed"+ + "\u01ee\7U\2\2\u01ee\u01ef\7V\2\2\u01ef\u01f0\7U\2\2\u01f0D\3\2\2\2\u01f1"+ + "\u01f2\7G\2\2\u01f2\u01f3\7Z\2\2\u01f3\u01f4\7R\2\2\u01f4\u01f5\7N\2\2"+ + "\u01f5\u01f6\7C\2\2\u01f6\u01f7\7K\2\2\u01f7\u01f8\7P\2\2\u01f8F\3\2\2"+ + "\2\u01f9\u01fa\7G\2\2\u01fa\u01fb\7Z\2\2\u01fb\u01fc\7V\2\2\u01fc\u01fd"+ + "\7T\2\2\u01fd\u01fe\7C\2\2\u01fe\u01ff\7E\2\2\u01ff\u0200\7V\2\2\u0200"+ + "H\3\2\2\2\u0201\u0202\7H\2\2\u0202\u0203\7C\2\2\u0203\u0204\7N\2\2\u0204"+ + "\u0205\7U\2\2\u0205\u0206\7G\2\2\u0206J\3\2\2\2\u0207\u0208\7H\2\2\u0208"+ + "\u0209\7K\2\2\u0209\u020a\7T\2\2\u020a\u020b\7U\2\2\u020b\u020c\7V\2\2"+ + "\u020cL\3\2\2\2\u020d\u020e\7H\2\2\u020e\u020f\7Q\2\2\u020f\u0210\7T\2"+ + "\2\u0210N\3\2\2\2\u0211\u0212\7H\2\2\u0212\u0213\7Q\2\2\u0213\u0214\7"+ + "T\2\2\u0214\u0215\7O\2\2\u0215\u0216\7C\2\2\u0216\u0217\7V\2\2\u0217P"+ + "\3\2\2\2\u0218\u0219\7H\2\2\u0219\u021a\7T\2\2\u021a\u021b\7Q\2\2\u021b"+ + "\u021c\7O\2\2\u021cR\3\2\2\2\u021d\u021e\7H\2\2\u021e\u021f\7T\2\2\u021f"+ + "\u0220\7Q\2\2\u0220\u0221\7\\\2\2\u0221\u0222\7G\2\2\u0222\u0223\7P\2"+ + "\2\u0223T\3\2\2\2\u0224\u0225\7H\2\2\u0225\u0226\7W\2\2\u0226\u0227\7"+ + "N\2\2\u0227\u0228\7N\2\2\u0228V\3\2\2\2\u0229\u022a\7H\2\2\u022a\u022b"+ + "\7W\2\2\u022b\u022c\7P\2\2\u022c\u022d\7E\2\2\u022d\u022e\7V\2\2\u022e"+ + "\u022f\7K\2\2\u022f\u0230\7Q\2\2\u0230\u0231\7P\2\2\u0231\u0232\7U\2\2"+ + "\u0232X\3\2\2\2\u0233\u0234\7I\2\2\u0234\u0235\7T\2\2\u0235\u0236\7C\2"+ + "\2\u0236\u0237\7R\2\2\u0237\u0238\7J\2\2\u0238\u0239\7X\2\2\u0239\u023a"+ + "\7K\2\2\u023a\u023b\7\\\2\2\u023bZ\3\2\2\2\u023c\u023d\7I\2\2\u023d\u023e"+ + "\7T\2\2\u023e\u023f\7Q\2\2\u023f\u0240\7W\2\2\u0240\u0241\7R\2\2\u0241"+ + 
"\\\3\2\2\2\u0242\u0243\7J\2\2\u0243\u0244\7C\2\2\u0244\u0245\7X\2\2\u0245"+ + "\u0246\7K\2\2\u0246\u0247\7P\2\2\u0247\u0248\7I\2\2\u0248^\3\2\2\2\u0249"+ + "\u024a\7J\2\2\u024a\u024b\7Q\2\2\u024b\u024c\7W\2\2\u024c\u024d\7T\2\2"+ + "\u024d`\3\2\2\2\u024e\u024f\7J\2\2\u024f\u0250\7Q\2\2\u0250\u0251\7W\2"+ + "\2\u0251\u0252\7T\2\2\u0252\u0253\7U\2\2\u0253b\3\2\2\2\u0254\u0255\7"+ + "K\2\2\u0255\u0256\7P\2\2\u0256d\3\2\2\2\u0257\u0258\7K\2\2\u0258\u0259"+ + "\7P\2\2\u0259\u025a\7E\2\2\u025a\u025b\7N\2\2\u025b\u025c\7W\2\2\u025c"+ + "\u025d\7F\2\2\u025d\u025e\7G\2\2\u025ef\3\2\2\2\u025f\u0260\7K\2\2\u0260"+ + "\u0261\7P\2\2\u0261\u0262\7P\2\2\u0262\u0263\7G\2\2\u0263\u0264\7T\2\2"+ + "\u0264h\3\2\2\2\u0265\u0266\7K\2\2\u0266\u0267\7P\2\2\u0267\u0268\7V\2"+ + "\2\u0268\u0269\7G\2\2\u0269\u026a\7T\2\2\u026a\u026b\7X\2\2\u026b\u026c"+ + "\7C\2\2\u026c\u026d\7N\2\2\u026dj\3\2\2\2\u026e\u026f\7K\2\2\u026f\u0270"+ + "\7U\2\2\u0270l\3\2\2\2\u0271\u0272\7L\2\2\u0272\u0273\7Q\2\2\u0273\u0274"+ + "\7K\2\2\u0274\u0275\7P\2\2\u0275n\3\2\2\2\u0276\u0277\7N\2\2\u0277\u0278"+ + "\7C\2\2\u0278\u0279\7U\2\2\u0279\u027a\7V\2\2\u027ap\3\2\2\2\u027b\u027c"+ + "\7N\2\2\u027c\u027d\7G\2\2\u027d\u027e\7H\2\2\u027e\u027f\7V\2\2\u027f"+ + "r\3\2\2\2\u0280\u0281\7N\2\2\u0281\u0282\7K\2\2\u0282\u0283\7M\2\2\u0283"+ + "\u0284\7G\2\2\u0284t\3\2\2\2\u0285\u0286\7N\2\2\u0286\u0287\7K\2\2\u0287"+ + "\u0288\7O\2\2\u0288\u0289\7K\2\2\u0289\u028a\7V\2\2\u028av\3\2\2\2\u028b"+ + "\u028c\7O\2\2\u028c\u028d\7C\2\2\u028d\u028e\7R\2\2\u028e\u028f\7R\2\2"+ + "\u028f\u0290\7G\2\2\u0290\u0291\7F\2\2\u0291x\3\2\2\2\u0292\u0293\7O\2"+ + "\2\u0293\u0294\7C\2\2\u0294\u0295\7V\2\2\u0295\u0296\7E\2\2\u0296\u0297"+ + "\7J\2\2\u0297z\3\2\2\2\u0298\u0299\7O\2\2\u0299\u029a\7K\2\2\u029a\u029b"+ + "\7P\2\2\u029b\u029c\7W\2\2\u029c\u029d\7V\2\2\u029d\u029e\7G\2\2\u029e"+ + "|\3\2\2\2\u029f\u02a0\7O\2\2\u02a0\u02a1\7K\2\2\u02a1\u02a2\7P\2\2\u02a2"+ + "\u02a3\7W\2\2\u02a3\u02a4\7V\2\2\u02a4\u02a5\7G\2\2\u02a5\u02a6\7U\2\2"+ + "\u02a6~\3\2\2\2\u02a7\u02a8\7O\2\2\u02a8\u02a9\7Q\2\2\u02a9\u02aa\7P\2"+ + "\2\u02aa\u02ab\7V\2\2\u02ab\u02ac\7J\2\2\u02ac\u0080\3\2\2\2\u02ad\u02ae"+ + "\7O\2\2\u02ae\u02af\7Q\2\2\u02af\u02b0\7P\2\2\u02b0\u02b1\7V\2\2\u02b1"+ + "\u02b2\7J\2\2\u02b2\u02b3\7U\2\2\u02b3\u0082\3\2\2\2\u02b4\u02b5\7P\2"+ + "\2\u02b5\u02b6\7C\2\2\u02b6\u02b7\7V\2\2\u02b7\u02b8\7W\2\2\u02b8\u02b9"+ + "\7T\2\2\u02b9\u02ba\7C\2\2\u02ba\u02bb\7N\2\2\u02bb\u0084\3\2\2\2\u02bc"+ + "\u02bd\7P\2\2\u02bd\u02be\7Q\2\2\u02be\u02bf\7V\2\2\u02bf\u0086\3\2\2"+ + "\2\u02c0\u02c1\7P\2\2\u02c1\u02c2\7W\2\2\u02c2\u02c3\7N\2\2\u02c3\u02c4"+ + "\7N\2\2\u02c4\u0088\3\2\2\2\u02c5\u02c6\7P\2\2\u02c6\u02c7\7W\2\2\u02c7"+ + "\u02c8\7N\2\2\u02c8\u02c9\7N\2\2\u02c9\u02ca\7U\2\2\u02ca\u008a\3\2\2"+ + "\2\u02cb\u02cc\7Q\2\2\u02cc\u02cd\7P\2\2\u02cd\u008c\3\2\2\2\u02ce\u02cf"+ + "\7Q\2\2\u02cf\u02d0\7R\2\2\u02d0\u02d1\7V\2\2\u02d1\u02d2\7K\2\2\u02d2"+ + "\u02d3\7O\2\2\u02d3\u02d4\7K\2\2\u02d4\u02d5\7\\\2\2\u02d5\u02d6\7G\2"+ + "\2\u02d6\u02d7\7F\2\2\u02d7\u008e\3\2\2\2\u02d8\u02d9\7Q\2\2\u02d9\u02da"+ + "\7T\2\2\u02da\u0090\3\2\2\2\u02db\u02dc\7Q\2\2\u02dc\u02dd\7T\2\2\u02dd"+ + "\u02de\7F\2\2\u02de\u02df\7G\2\2\u02df\u02e0\7T\2\2\u02e0\u0092\3\2\2"+ + "\2\u02e1\u02e2\7Q\2\2\u02e2\u02e3\7W\2\2\u02e3\u02e4\7V\2\2\u02e4\u02e5"+ + "\7G\2\2\u02e5\u02e6\7T\2\2\u02e6\u0094\3\2\2\2\u02e7\u02e8\7R\2\2\u02e8"+ + "\u02e9\7C\2\2\u02e9\u02ea\7T\2\2\u02ea\u02eb\7U\2\2\u02eb\u02ec\7G\2\2"+ + "\u02ec\u02ed\7F\2\2\u02ed\u0096\3\2\2\2\u02ee\u02ef\7R\2\2\u02ef\u02f0"+ + 
"\7J\2\2\u02f0\u02f1\7[\2\2\u02f1\u02f2\7U\2\2\u02f2\u02f3\7K\2\2\u02f3"+ + "\u02f4\7E\2\2\u02f4\u02f5\7C\2\2\u02f5\u02f6\7N\2\2\u02f6\u0098\3\2\2"+ + "\2\u02f7\u02f8\7R\2\2\u02f8\u02f9\7K\2\2\u02f9\u02fa\7X\2\2\u02fa\u02fb"+ + "\7Q\2\2\u02fb\u02fc\7V\2\2\u02fc\u009a\3\2\2\2\u02fd\u02fe\7R\2\2\u02fe"+ + "\u02ff\7N\2\2\u02ff\u0300\7C\2\2\u0300\u0301\7P\2\2\u0301\u009c\3\2\2"+ + "\2\u0302\u0303\7T\2\2\u0303\u0304\7K\2\2\u0304\u0305\7I\2\2\u0305\u0306"+ + "\7J\2\2\u0306\u0307\7V\2\2\u0307\u009e\3\2\2\2\u0308\u0309\7T\2\2\u0309"+ + "\u030a\7N\2\2\u030a\u030b\7K\2\2\u030b\u030c\7M\2\2\u030c\u030d\7G\2\2"+ + "\u030d\u00a0\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0310\7W\2\2\u0310\u0311"+ + "\7G\2\2\u0311\u0312\7T\2\2\u0312\u0313\7[\2\2\u0313\u00a2\3\2\2\2\u0314"+ + "\u0315\7U\2\2\u0315\u0316\7E\2\2\u0316\u0317\7J\2\2\u0317\u0318\7G\2\2"+ + "\u0318\u0319\7O\2\2\u0319\u031a\7C\2\2\u031a\u031b\7U\2\2\u031b\u00a4"+ + "\3\2\2\2\u031c\u031d\7U\2\2\u031d\u031e\7G\2\2\u031e\u031f\7E\2\2\u031f"+ + "\u0320\7Q\2\2\u0320\u0321\7P\2\2\u0321\u0322\7F\2\2\u0322\u00a6\3\2\2"+ + "\2\u0323\u0324\7U\2\2\u0324\u0325\7G\2\2\u0325\u0326\7E\2\2\u0326\u0327"+ + "\7Q\2\2\u0327\u0328\7P\2\2\u0328\u0329\7F\2\2\u0329\u032a\7U\2\2\u032a"+ + "\u00a8\3\2\2\2\u032b\u032c\7U\2\2\u032c\u032d\7G\2\2\u032d\u032e\7N\2"+ + "\2\u032e\u032f\7G\2\2\u032f\u0330\7E\2\2\u0330\u0331\7V\2\2\u0331\u00aa"+ + "\3\2\2\2\u0332\u0333\7U\2\2\u0333\u0334\7J\2\2\u0334\u0335\7Q\2\2\u0335"+ + "\u0336\7Y\2\2\u0336\u00ac\3\2\2\2\u0337\u0338\7U\2\2\u0338\u0339\7[\2"+ + "\2\u0339\u033a\7U\2\2\u033a\u00ae\3\2\2\2\u033b\u033c\7V\2\2\u033c\u033d"+ + "\7C\2\2\u033d\u033e\7D\2\2\u033e\u033f\7N\2\2\u033f\u0340\7G\2\2\u0340"+ + "\u00b0\3\2\2\2\u0341\u0342\7V\2\2\u0342\u0343\7C\2\2\u0343\u0344\7D\2"+ + "\2\u0344\u0345\7N\2\2\u0345\u0346\7G\2\2\u0346\u0347\7U\2\2\u0347\u00b2"+ + "\3\2\2\2\u0348\u0349\7V\2\2\u0349\u034a\7G\2\2\u034a\u034b\7Z\2\2\u034b"+ + "\u034c\7V\2\2\u034c\u00b4\3\2\2\2\u034d\u034e\7V\2\2\u034e\u034f\7J\2"+ + "\2\u034f\u0350\7G\2\2\u0350\u0351\7P\2\2\u0351\u00b6\3\2\2\2\u0352\u0353"+ + "\7V\2\2\u0353\u0354\7T\2\2\u0354\u0355\7W\2\2\u0355\u0356\7G\2\2\u0356"+ + "\u00b8\3\2\2\2\u0357\u0358\7V\2\2\u0358\u0359\7Q\2\2\u0359\u00ba\3\2\2"+ + "\2\u035a\u035b\7V\2\2\u035b\u035c\7[\2\2\u035c\u035d\7R\2\2\u035d\u035e"+ + "\7G\2\2\u035e\u00bc\3\2\2\2\u035f\u0360\7V\2\2\u0360\u0361\7[\2\2\u0361"+ + "\u0362\7R\2\2\u0362\u0363\7G\2\2\u0363\u0364\7U\2\2\u0364\u00be\3\2\2"+ + "\2\u0365\u0366\7W\2\2\u0366\u0367\7U\2\2\u0367\u0368\7K\2\2\u0368\u0369"+ + "\7P\2\2\u0369\u036a\7I\2\2\u036a\u00c0\3\2\2\2\u036b\u036c\7X\2\2\u036c"+ + "\u036d\7G\2\2\u036d\u036e\7T\2\2\u036e\u036f\7K\2\2\u036f\u0370\7H\2\2"+ + "\u0370\u0371\7[\2\2\u0371\u00c2\3\2\2\2\u0372\u0373\7Y\2\2\u0373\u0374"+ + "\7J\2\2\u0374\u0375\7G\2\2\u0375\u0376\7P\2\2\u0376\u00c4\3\2\2\2\u0377"+ + "\u0378\7Y\2\2\u0378\u0379\7J\2\2\u0379\u037a\7G\2\2\u037a\u037b\7T\2\2"+ + "\u037b\u037c\7G\2\2\u037c\u00c6\3\2\2\2\u037d\u037e\7Y\2\2\u037e\u037f"+ + "\7K\2\2\u037f\u0380\7V\2\2\u0380\u0381\7J\2\2\u0381\u00c8\3\2\2\2\u0382"+ + "\u0383\7[\2\2\u0383\u0384\7G\2\2\u0384\u0385\7C\2\2\u0385\u0386\7T\2\2"+ + "\u0386\u00ca\3\2\2\2\u0387\u0388\7[\2\2\u0388\u0389\7G\2\2\u0389\u038a"+ + "\7C\2\2\u038a\u038b\7T\2\2\u038b\u038c\7U\2\2\u038c\u00cc\3\2\2\2\u038d"+ + "\u038e\7}\2\2\u038e\u038f\7G\2\2\u038f\u0390\7U\2\2\u0390\u0391\7E\2\2"+ + "\u0391\u0392\7C\2\2\u0392\u0393\7R\2\2\u0393\u0394\7G\2\2\u0394\u00ce"+ + "\3\2\2\2\u0395\u0396\7}\2\2\u0396\u0397\7H\2\2\u0397\u0398\7P\2\2\u0398"+ + 
"\u00d0\3\2\2\2\u0399\u039a\7}\2\2\u039a\u039b\7N\2\2\u039b\u039c\7K\2"+ + "\2\u039c\u039d\7O\2\2\u039d\u039e\7K\2\2\u039e\u039f\7V\2\2\u039f\u00d2"+ + "\3\2\2\2\u03a0\u03a1\7}\2\2\u03a1\u03a2\7F\2\2\u03a2\u00d4\3\2\2\2\u03a3"+ + "\u03a4\7}\2\2\u03a4\u03a5\7V\2\2\u03a5\u00d6\3\2\2\2\u03a6\u03a7\7}\2"+ + "\2\u03a7\u03a8\7V\2\2\u03a8\u03a9\7U\2\2\u03a9\u00d8\3\2\2\2\u03aa\u03ab"+ + "\7}\2\2\u03ab\u03ac\7I\2\2\u03ac\u03ad\7W\2\2\u03ad\u03ae\7K\2\2\u03ae"+ + "\u03af\7F\2\2\u03af\u00da\3\2\2\2\u03b0\u03b1\7\177\2\2\u03b1\u00dc\3"+ + "\2\2\2\u03b2\u03b3\7?\2\2\u03b3\u00de\3\2\2\2\u03b4\u03b5\7>\2\2\u03b5"+ + "\u03b6\7?\2\2\u03b6\u03b7\7@\2\2\u03b7\u00e0\3\2\2\2\u03b8\u03b9\7>\2"+ + "\2\u03b9\u03bd\7@\2\2\u03ba\u03bb\7#\2\2\u03bb\u03bd\7?\2\2\u03bc\u03b8"+ + "\3\2\2\2\u03bc\u03ba\3\2\2\2\u03bd\u00e2\3\2\2\2\u03be\u03bf\7>\2\2\u03bf"+ + "\u00e4\3\2\2\2\u03c0\u03c1\7>\2\2\u03c1\u03c2\7?\2\2\u03c2\u00e6\3\2\2"+ + "\2\u03c3\u03c4\7@\2\2\u03c4\u00e8\3\2\2\2\u03c5\u03c6\7@\2\2\u03c6\u03c7"+ + "\7?\2\2\u03c7\u00ea\3\2\2\2\u03c8\u03c9\7-\2\2\u03c9\u00ec\3\2\2\2\u03ca"+ + "\u03cb\7/\2\2\u03cb\u00ee\3\2\2\2\u03cc\u03cd\7,\2\2\u03cd\u00f0\3\2\2"+ + "\2\u03ce\u03cf\7\61\2\2\u03cf\u00f2\3\2\2\2\u03d0\u03d1\7\'\2\2\u03d1"+ + "\u00f4\3\2\2\2\u03d2\u03d3\7<\2\2\u03d3\u03d4\7<\2\2\u03d4\u00f6\3\2\2"+ + "\2\u03d5\u03d6\7~\2\2\u03d6\u03d7\7~\2\2\u03d7\u00f8\3\2\2\2\u03d8\u03d9"+ + "\7\60\2\2\u03d9\u00fa\3\2\2\2\u03da\u03db\7A\2\2\u03db\u00fc\3\2\2\2\u03dc"+ + "\u03e2\7)\2\2\u03dd\u03e1\n\2\2\2\u03de\u03df\7)\2\2\u03df\u03e1\7)\2"+ + "\2\u03e0\u03dd\3\2\2\2\u03e0\u03de\3\2\2\2\u03e1\u03e4\3\2\2\2\u03e2\u03e0"+ + "\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3\u03e5\3\2\2\2\u03e4\u03e2\3\2\2\2\u03e5"+ + "\u03e6\7)\2\2\u03e6\u00fe\3\2\2\2\u03e7\u03e9\5\u010f\u0088\2\u03e8\u03e7"+ + "\3\2\2\2\u03e9\u03ea\3\2\2\2\u03ea\u03e8\3\2\2\2\u03ea\u03eb\3\2\2\2\u03eb"+ + "\u0100\3\2\2\2\u03ec\u03ee\5\u010f\u0088\2\u03ed\u03ec\3\2\2\2\u03ee\u03ef"+ + "\3\2\2\2\u03ef\u03ed\3\2\2\2\u03ef\u03f0\3\2\2\2\u03f0\u03f1\3\2\2\2\u03f1"+ + "\u03f5\5\u00f9}\2\u03f2\u03f4\5\u010f\u0088\2\u03f3\u03f2\3\2\2\2\u03f4"+ + "\u03f7\3\2\2\2\u03f5\u03f3\3\2\2\2\u03f5\u03f6\3\2\2\2\u03f6\u0417\3\2"+ + "\2\2\u03f7\u03f5\3\2\2\2\u03f8\u03fa\5\u00f9}\2\u03f9\u03fb\5\u010f\u0088"+ + "\2\u03fa\u03f9\3\2\2\2\u03fb\u03fc\3\2\2\2\u03fc\u03fa\3\2\2\2\u03fc\u03fd"+ + "\3\2\2\2\u03fd\u0417\3\2\2\2\u03fe\u0400\5\u010f\u0088\2\u03ff\u03fe\3"+ + "\2\2\2\u0400\u0401\3\2\2\2\u0401\u03ff\3\2\2\2\u0401\u0402\3\2\2\2\u0402"+ + "\u040a\3\2\2\2\u0403\u0407\5\u00f9}\2\u0404\u0406\5\u010f\u0088\2\u0405"+ + "\u0404\3\2\2\2\u0406\u0409\3\2\2\2\u0407\u0405\3\2\2\2\u0407\u0408\3\2"+ + "\2\2\u0408\u040b\3\2\2\2\u0409\u0407\3\2\2\2\u040a\u0403\3\2\2\2\u040a"+ + "\u040b\3\2\2\2\u040b\u040c\3\2\2\2\u040c\u040d\5\u010d\u0087\2\u040d\u0417"+ + "\3\2\2\2\u040e\u0410\5\u00f9}\2\u040f\u0411\5\u010f\u0088\2\u0410\u040f"+ + "\3\2\2\2\u0411\u0412\3\2\2\2\u0412\u0410\3\2\2\2\u0412\u0413\3\2\2\2\u0413"+ + "\u0414\3\2\2\2\u0414\u0415\5\u010d\u0087\2\u0415\u0417\3\2\2\2\u0416\u03ed"+ + "\3\2\2\2\u0416\u03f8\3\2\2\2\u0416\u03ff\3\2\2\2\u0416\u040e\3\2\2\2\u0417"+ + "\u0102\3\2\2\2\u0418\u041b\5\u0111\u0089\2\u0419\u041b\7a\2\2\u041a\u0418"+ + "\3\2\2\2\u041a\u0419\3\2\2\2\u041b\u0421\3\2\2\2\u041c\u0420\5\u0111\u0089"+ + "\2\u041d\u0420\5\u010f\u0088\2\u041e\u0420\t\3\2\2\u041f\u041c\3\2\2\2"+ + "\u041f\u041d\3\2\2\2\u041f\u041e\3\2\2\2\u0420\u0423\3\2\2\2\u0421\u041f"+ + "\3\2\2\2\u0421\u0422\3\2\2\2\u0422\u0104\3\2\2\2\u0423\u0421\3\2\2\2\u0424"+ + 
"\u0428\5\u010f\u0088\2\u0425\u0429\5\u0111\u0089\2\u0426\u0429\5\u010f"+ + "\u0088\2\u0427\u0429\t\3\2\2\u0428\u0425\3\2\2\2\u0428\u0426\3\2\2\2\u0428"+ + "\u0427\3\2\2\2\u0429\u042a\3\2\2\2\u042a\u0428\3\2\2\2\u042a\u042b\3\2"+ + "\2\2\u042b\u0106\3\2\2\2\u042c\u0430\5\u0111\u0089\2\u042d\u0430\5\u010f"+ + "\u0088\2\u042e\u0430\7a\2\2\u042f\u042c\3\2\2\2\u042f\u042d\3\2\2\2\u042f"+ + "\u042e\3\2\2\2\u0430\u0431\3\2\2\2\u0431\u042f\3\2\2\2\u0431\u0432\3\2"+ + "\2\2\u0432\u0108\3\2\2\2\u0433\u0439\7$\2\2\u0434\u0438\n\4\2\2\u0435"+ + "\u0436\7$\2\2\u0436\u0438\7$\2\2\u0437\u0434\3\2\2\2\u0437\u0435\3\2\2"+ + "\2\u0438\u043b\3\2\2\2\u0439\u0437\3\2\2\2\u0439\u043a\3\2\2\2\u043a\u043c"+ + "\3\2\2\2\u043b\u0439\3\2\2\2\u043c\u043d\7$\2\2\u043d\u010a\3\2\2\2\u043e"+ + "\u0444\7b\2\2\u043f\u0443\n\5\2\2\u0440\u0441\7b\2\2\u0441\u0443\7b\2"+ + "\2\u0442\u043f\3\2\2\2\u0442\u0440\3\2\2\2\u0443\u0446\3\2\2\2\u0444\u0442"+ + "\3\2\2\2\u0444\u0445\3\2\2\2\u0445\u0447\3\2\2\2\u0446\u0444\3\2\2\2\u0447"+ + "\u0448\7b\2\2\u0448\u010c\3\2\2\2\u0449\u044b\7G\2\2\u044a\u044c\t\6\2"+ + "\2\u044b\u044a\3\2\2\2\u044b\u044c\3\2\2\2\u044c\u044e\3\2\2\2\u044d\u044f"+ + "\5\u010f\u0088\2\u044e\u044d\3\2\2\2\u044f\u0450\3\2\2\2\u0450\u044e\3"+ + "\2\2\2\u0450\u0451\3\2\2\2\u0451\u010e\3\2\2\2\u0452\u0453\t\7\2\2\u0453"+ + "\u0110\3\2\2\2\u0454\u0455\t\b\2\2\u0455\u0112\3\2\2\2\u0456\u0457\7/"+ + "\2\2\u0457\u0458\7/\2\2\u0458\u045c\3\2\2\2\u0459\u045b\n\t\2\2\u045a"+ + "\u0459\3\2\2\2\u045b\u045e\3\2\2\2\u045c\u045a\3\2\2\2\u045c\u045d\3\2"+ + "\2\2\u045d\u0460\3\2\2\2\u045e\u045c\3\2\2\2\u045f\u0461\7\17\2\2\u0460"+ + "\u045f\3\2\2\2\u0460\u0461\3\2\2\2\u0461\u0463\3\2\2\2\u0462\u0464\7\f"+ + "\2\2\u0463\u0462\3\2\2\2\u0463\u0464\3\2\2\2\u0464\u0465\3\2\2\2\u0465"+ + "\u0466\b\u008a\2\2\u0466\u0114\3\2\2\2\u0467\u0468\7\61\2\2\u0468\u0469"+ + "\7,\2\2\u0469\u046e\3\2\2\2\u046a\u046d\5\u0115\u008b\2\u046b\u046d\13"+ + "\2\2\2\u046c\u046a\3\2\2\2\u046c\u046b\3\2\2\2\u046d\u0470\3\2\2\2\u046e"+ + "\u046f\3\2\2\2\u046e\u046c\3\2\2\2\u046f\u0471\3\2\2\2\u0470\u046e\3\2"+ + "\2\2\u0471\u0472\7,\2\2\u0472\u0473\7\61\2\2\u0473\u0474\3\2\2\2\u0474"+ + "\u0475\b\u008b\2\2\u0475\u0116\3\2\2\2\u0476\u0478\t\n\2\2\u0477\u0476"+ + "\3\2\2\2\u0478\u0479\3\2\2\2\u0479\u0477\3\2\2\2\u0479\u047a\3\2\2\2\u047a"+ + "\u047b\3\2\2\2\u047b\u047c\b\u008c\2\2\u047c\u0118\3\2\2\2\u047d\u047e"+ + "\13\2\2\2\u047e\u011a\3\2\2\2\"\2\u03bc\u03e0\u03e2\u03ea\u03ef\u03f5"+ + "\u03fc\u0401\u0407\u040a\u0412\u0416\u041a\u041f\u0421\u0428\u042a\u042f"+ + "\u0431\u0437\u0439\u0442\u0444\u044b\u0450\u045c\u0460\u0463\u046c\u046e"+ + "\u0479\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index c0845b7adb5..671368342e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -283,6 +283,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#selectItems}. 
+ * @param ctx the parse tree + */ + void enterSelectItems(SqlBaseParser.SelectItemsContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#selectItems}. + * @param ctx the parse tree + */ + void exitSelectItems(SqlBaseParser.SelectItemsContext ctx); /** * Enter a parse tree produced by the {@code selectExpression} * labeled alternative in {@link SqlBaseParser#selectItem}. @@ -371,6 +381,36 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + */ + void enterPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + */ + void exitPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + */ + void enterPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + */ + void exitPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#namedValueExpression}. + * @param ctx the parse tree + */ + void enterNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#namedValueExpression}. + * @param ctx the parse tree + */ + void exitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#expression}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 76e0f4654df..63cc1bd7a3f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -22,51 +22,54 @@ class SqlBaseParser extends Parser { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, - GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, - IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, - MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, - NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, - PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, - SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, - TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, - YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, - TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, - NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, - SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, - INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, - TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, BACKQUOTED_IDENTIFIER=131, 
- SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135, DELIMITER=136; + FIRST=37, FOR=38, FORMAT=39, FROM=40, FROZEN=41, FULL=42, FUNCTIONS=43, + GRAPHVIZ=44, GROUP=45, HAVING=46, HOUR=47, HOURS=48, IN=49, INCLUDE=50, + INNER=51, INTERVAL=52, IS=53, JOIN=54, LAST=55, LEFT=56, LIKE=57, LIMIT=58, + MAPPED=59, MATCH=60, MINUTE=61, MINUTES=62, MONTH=63, MONTHS=64, NATURAL=65, + NOT=66, NULL=67, NULLS=68, ON=69, OPTIMIZED=70, OR=71, ORDER=72, OUTER=73, + PARSED=74, PHYSICAL=75, PIVOT=76, PLAN=77, RIGHT=78, RLIKE=79, QUERY=80, + SCHEMAS=81, SECOND=82, SECONDS=83, SELECT=84, SHOW=85, SYS=86, TABLE=87, + TABLES=88, TEXT=89, THEN=90, TRUE=91, TO=92, TYPE=93, TYPES=94, USING=95, + VERIFY=96, WHEN=97, WHERE=98, WITH=99, YEAR=100, YEARS=101, ESCAPE_ESC=102, + FUNCTION_ESC=103, LIMIT_ESC=104, DATE_ESC=105, TIME_ESC=106, TIMESTAMP_ESC=107, + GUID_ESC=108, ESC_END=109, EQ=110, NULLEQ=111, NEQ=112, LT=113, LTE=114, + GT=115, GTE=116, PLUS=117, MINUS=118, ASTERISK=119, SLASH=120, PERCENT=121, + CAST_OP=122, CONCAT=123, DOT=124, PARAM=125, STRING=126, INTEGER_VALUE=127, + DECIMAL_VALUE=128, IDENTIFIER=129, DIGIT_IDENTIFIER=130, TABLE_IDENTIFIER=131, + QUOTED_IDENTIFIER=132, BACKQUOTED_IDENTIFIER=133, SIMPLE_COMMENT=134, + BRACKETED_COMMENT=135, WS=136, UNRECOGNIZED=137, DELIMITER=138; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, RULE_orderBy = 7, RULE_querySpecification = 8, RULE_fromClause = 9, RULE_groupBy = 10, RULE_groupingElement = 11, RULE_groupingExpressions = 12, RULE_namedQuery = 13, - RULE_setQuantifier = 14, RULE_selectItem = 15, RULE_relation = 16, RULE_joinRelation = 17, - RULE_joinType = 18, RULE_joinCriteria = 19, RULE_relationPrimary = 20, - RULE_expression = 21, RULE_booleanExpression = 22, RULE_matchQueryOptions = 23, - RULE_predicated = 24, RULE_predicate = 25, RULE_likePattern = 26, RULE_pattern = 27, - RULE_patternEscape = 28, RULE_valueExpression = 29, RULE_primaryExpression = 30, - RULE_builtinDateTimeFunction = 31, RULE_castExpression = 32, RULE_castTemplate = 33, - RULE_convertTemplate = 34, RULE_extractExpression = 35, RULE_extractTemplate = 36, - RULE_functionExpression = 37, RULE_functionTemplate = 38, RULE_functionName = 39, - RULE_constant = 40, RULE_comparisonOperator = 41, RULE_booleanValue = 42, - RULE_interval = 43, RULE_intervalField = 44, RULE_dataType = 45, RULE_qualifiedName = 46, - RULE_identifier = 47, RULE_tableIdentifier = 48, RULE_quoteIdentifier = 49, - RULE_unquoteIdentifier = 50, RULE_number = 51, RULE_string = 52, RULE_whenClause = 53, - RULE_nonReserved = 54; + RULE_setQuantifier = 14, RULE_selectItems = 15, RULE_selectItem = 16, + RULE_relation = 17, RULE_joinRelation = 18, RULE_joinType = 19, RULE_joinCriteria = 20, + RULE_relationPrimary = 21, RULE_pivotClause = 22, RULE_pivotArgs = 23, + RULE_namedValueExpression = 24, RULE_expression = 25, RULE_booleanExpression = 26, + RULE_matchQueryOptions = 27, RULE_predicated = 28, RULE_predicate = 29, + RULE_likePattern = 30, RULE_pattern = 31, RULE_patternEscape = 32, RULE_valueExpression = 33, + RULE_primaryExpression = 34, RULE_builtinDateTimeFunction = 35, RULE_castExpression = 36, + RULE_castTemplate = 37, RULE_convertTemplate = 38, RULE_extractExpression = 39, + RULE_extractTemplate = 40, RULE_functionExpression = 41, RULE_functionTemplate = 42, + RULE_functionName = 43, RULE_constant = 44, RULE_comparisonOperator = 45, + RULE_booleanValue = 46, RULE_interval = 47, 
RULE_intervalField = 48, RULE_dataType = 49, + RULE_qualifiedName = 50, RULE_identifier = 51, RULE_tableIdentifier = 52, + RULE_quoteIdentifier = 53, RULE_unquoteIdentifier = 54, RULE_number = 55, + RULE_string = 56, RULE_whenClause = 57, RULE_nonReserved = 58; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier", - "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", - "relationPrimary", "expression", "booleanExpression", "matchQueryOptions", - "predicated", "predicate", "likePattern", "pattern", "patternEscape", - "valueExpression", "primaryExpression", "builtinDateTimeFunction", "castExpression", - "castTemplate", "convertTemplate", "extractExpression", "extractTemplate", - "functionExpression", "functionTemplate", "functionName", "constant", - "comparisonOperator", "booleanValue", "interval", "intervalField", "dataType", - "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", + "selectItems", "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", + "relationPrimary", "pivotClause", "pivotArgs", "namedValueExpression", + "expression", "booleanExpression", "matchQueryOptions", "predicated", + "predicate", "likePattern", "pattern", "patternEscape", "valueExpression", + "primaryExpression", "builtinDateTimeFunction", "castExpression", "castTemplate", + "convertTemplate", "extractExpression", "extractTemplate", "functionExpression", + "functionTemplate", "functionName", "constant", "comparisonOperator", + "booleanValue", "interval", "intervalField", "dataType", "qualifiedName", + "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", "number", "string", "whenClause", "nonReserved" }; @@ -76,40 +79,40 @@ class SqlBaseParser extends Parser { "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", + "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", - "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", - "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", - "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", - "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", "'{TS'", "'{GUID'", - "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", - "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", + "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", + "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", + "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", + 
"'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", + "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED", "DELIMITER" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -190,9 +193,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(110); + setState(118); statement(); - setState(111); + setState(119); match(EOF); } } @@ -237,9 +240,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(113); + setState(121); expression(); - setState(114); + setState(122); match(EOF); } } @@ -606,14 +609,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(229); + setState(237); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: _localctx = new 
StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(116); + setState(124); query(); } break; @@ -621,27 +624,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(117); + setState(125); match(EXPLAIN); - setState(131); + setState(139); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(118); + setState(126); match(T__0); - setState(127); + setState(135); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 38)) & ~0x3f) == 0 && ((1L << (_la - 38)) & ((1L << (FORMAT - 38)) | (1L << (PLAN - 38)) | (1L << (VERIFY - 38)))) != 0)) { + while (((((_la - 39)) & ~0x3f) == 0 && ((1L << (_la - 39)) & ((1L << (FORMAT - 39)) | (1L << (PLAN - 39)) | (1L << (VERIFY - 39)))) != 0)) { { - setState(125); + setState(133); switch (_input.LA(1)) { case PLAN: { - setState(119); + setState(127); match(PLAN); - setState(120); + setState(128); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED))) != 0) || _la==OPTIMIZED || _la==PARSED) ) { @@ -653,9 +656,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(121); + setState(129); match(FORMAT); - setState(122); + setState(130); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -667,9 +670,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(123); + setState(131); match(VERIFY); - setState(124); + setState(132); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -677,16 +680,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(129); + setState(137); _errHandler.sync(this); _la = _input.LA(1); } - setState(130); + setState(138); match(T__1); } break; } - setState(133); + setState(141); statement(); } break; @@ -694,27 +697,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(134); + setState(142); match(DEBUG); - setState(146); + setState(154); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(135); + setState(143); match(T__0); - setState(142); + setState(150); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(140); + setState(148); switch (_input.LA(1)) { case PLAN: { - setState(136); + setState(144); match(PLAN); - setState(137); + setState(145); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -726,9 +729,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(138); + setState(146); match(FORMAT); - setState(139); + setState(147); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -742,16 +745,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(144); + setState(152); _errHandler.sync(this); _la = _input.LA(1); } - setState(145); + setState(153); match(T__1); } break; } - setState(148); + setState(156); statement(); } break; @@ -759,26 +762,26 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(149); + setState(157); match(SHOW); - setState(150); + setState(158); match(TABLES); - 
setState(153); + setState(161); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(151); + setState(159); match(INCLUDE); - setState(152); + setState(160); match(FROZEN); } } - setState(157); + setState(165); switch (_input.LA(1)) { case LIKE: { - setState(155); + setState(163); ((ShowTablesContext)_localctx).tableLike = likePattern(); } break; @@ -808,6 +811,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -827,7 +831,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(156); + setState(164); ((ShowTablesContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -842,33 +846,33 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(159); + setState(167); match(SHOW); - setState(160); + setState(168); match(COLUMNS); - setState(163); + setState(171); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(161); + setState(169); match(INCLUDE); - setState(162); + setState(170); match(FROZEN); } } - setState(165); + setState(173); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(168); + setState(176); switch (_input.LA(1)) { case LIKE: { - setState(166); + setState(174); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -898,6 +902,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -917,7 +922,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(167); + setState(175); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -930,29 +935,29 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(170); + setState(178); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(173); + setState(181); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(171); + setState(179); match(INCLUDE); - setState(172); + setState(180); match(FROZEN); } } - setState(177); + setState(185); switch (_input.LA(1)) { case LIKE: { - setState(175); + setState(183); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -982,6 +987,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -1001,7 +1007,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(176); + setState(184); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1014,15 +1020,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(179); + setState(187); match(SHOW); - setState(180); + setState(188); match(FUNCTIONS); - setState(182); + setState(190); _la = _input.LA(1); if (_la==LIKE) { { - setState(181); + setState(189); likePattern(); } } @@ -1033,9 +1039,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(184); + setState(192); match(SHOW); - setState(185); + setState(193); match(SCHEMAS); } break; @@ -1043,58 +1049,58 @@ class SqlBaseParser extends Parser { _localctx = new SysTablesContext(_localctx); 
enterOuterAlt(_localctx, 9); { - setState(186); + setState(194); match(SYS); - setState(187); + setState(195); match(TABLES); - setState(190); + setState(198); _la = _input.LA(1); if (_la==CATALOG) { { - setState(188); + setState(196); match(CATALOG); - setState(189); + setState(197); ((SysTablesContext)_localctx).clusterLike = likePattern(); } } - setState(194); + setState(202); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(192); + setState(200); ((SysTablesContext)_localctx).tableLike = likePattern(); } break; case 2: { - setState(193); + setState(201); ((SysTablesContext)_localctx).tableIdent = tableIdentifier(); } break; } - setState(205); + setState(213); _la = _input.LA(1); if (_la==TYPE) { { - setState(196); + setState(204); match(TYPE); - setState(197); + setState(205); string(); - setState(202); + setState(210); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(198); + setState(206); match(T__2); - setState(199); + setState(207); string(); } } - setState(204); + setState(212); _errHandler.sync(this); _la = _input.LA(1); } @@ -1107,28 +1113,28 @@ class SqlBaseParser extends Parser { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(207); + setState(215); match(SYS); - setState(208); + setState(216); match(COLUMNS); - setState(211); + setState(219); _la = _input.LA(1); if (_la==CATALOG) { { - setState(209); + setState(217); match(CATALOG); - setState(210); + setState(218); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(216); + setState(224); switch (_input.LA(1)) { case TABLE: { - setState(213); + setState(221); match(TABLE); - setState(214); + setState(222); ((SysColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -1158,6 +1164,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -1177,7 +1184,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(215); + setState(223); ((SysColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1187,11 +1194,11 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(219); + setState(227); _la = _input.LA(1); if (_la==LIKE) { { - setState(218); + setState(226); ((SysColumnsContext)_localctx).columnPattern = likePattern(); } } @@ -1202,19 +1209,19 @@ class SqlBaseParser extends Parser { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(221); + setState(229); match(SYS); - setState(222); + setState(230); match(TYPES); - setState(227); + setState(235); _la = _input.LA(1); - if (((((_la - 115)) & ~0x3f) == 0 && ((1L << (_la - 115)) & ((1L << (PLUS - 115)) | (1L << (MINUS - 115)) | (1L << (INTEGER_VALUE - 115)) | (1L << (DECIMAL_VALUE - 115)))) != 0)) { + if (((((_la - 117)) & ~0x3f) == 0 && ((1L << (_la - 117)) & ((1L << (PLUS - 117)) | (1L << (MINUS - 117)) | (1L << (INTEGER_VALUE - 117)) | (1L << (DECIMAL_VALUE - 117)))) != 0)) { { - setState(224); + setState(232); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(223); + setState(231); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -1224,7 +1231,7 @@ class SqlBaseParser extends Parser { } } - setState(226); + setState(234); ((SysTypesContext)_localctx).type = number(); } } @@ -1281,34 +1288,34 @@ class SqlBaseParser extends Parser { try { 
enterOuterAlt(_localctx, 1); { - setState(240); + setState(248); _la = _input.LA(1); if (_la==WITH) { { - setState(231); + setState(239); match(WITH); - setState(232); + setState(240); namedQuery(); - setState(237); + setState(245); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(233); + setState(241); match(T__2); - setState(234); + setState(242); namedQuery(); } } - setState(239); + setState(247); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(242); + setState(250); queryNoWith(); } } @@ -1364,42 +1371,42 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(244); + setState(252); queryTerm(); - setState(255); + setState(263); _la = _input.LA(1); if (_la==ORDER) { { - setState(245); + setState(253); match(ORDER); - setState(246); + setState(254); match(BY); - setState(247); + setState(255); orderBy(); - setState(252); + setState(260); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(248); + setState(256); match(T__2); - setState(249); + setState(257); orderBy(); } } - setState(254); + setState(262); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(258); + setState(266); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(257); + setState(265); limitClause(); } } @@ -1448,14 +1455,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(265); + setState(273); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(260); + setState(268); match(LIMIT); - setState(261); + setState(269); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1468,9 +1475,9 @@ class SqlBaseParser extends Parser { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { - setState(262); + setState(270); match(LIMIT_ESC); - setState(263); + setState(271); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1478,7 +1485,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(264); + setState(272); match(ESC_END); } break; @@ -1551,13 +1558,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(272); + setState(280); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(267); + setState(275); querySpecification(); } break; @@ -1565,11 +1572,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(268); + setState(276); match(T__0); - setState(269); + setState(277); queryNoWith(); - setState(270); + setState(278); match(T__1); } break; @@ -1625,13 +1632,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(274); + setState(282); expression(); - setState(276); + setState(284); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(275); + setState(283); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1642,13 +1649,13 @@ class SqlBaseParser extends Parser { } } - setState(280); + setState(288); _la = _input.LA(1); if (_la==NULLS) { { - setState(278); + setState(286); match(NULLS); - setState(279); + setState(287); ((OrderByContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( 
!(_la==FIRST || _la==LAST) ) { @@ -1676,11 +1683,8 @@ class SqlBaseParser extends Parser { public BooleanExpressionContext where; public BooleanExpressionContext having; public TerminalNode SELECT() { return getToken(SqlBaseParser.SELECT, 0); } - public List selectItem() { - return getRuleContexts(SelectItemContext.class); - } - public SelectItemContext selectItem(int i) { - return getRuleContext(SelectItemContext.class,i); + public SelectItemsContext selectItems() { + return getRuleContext(SelectItemsContext.class,0); } public SetQuantifierContext setQuantifier() { return getRuleContext(SetQuantifierContext.class,0); @@ -1727,75 +1731,59 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(282); + setState(290); match(SELECT); - setState(284); + setState(292); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(283); + setState(291); setQuantifier(); } } - setState(286); - selectItem(); - setState(291); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==T__2) { - { - { - setState(287); - match(T__2); - setState(288); - selectItem(); - } - } - setState(293); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(295); + setState(294); + selectItems(); + setState(296); _la = _input.LA(1); if (_la==FROM) { { - setState(294); + setState(295); fromClause(); } } - setState(299); + setState(300); _la = _input.LA(1); if (_la==WHERE) { { - setState(297); - match(WHERE); setState(298); + match(WHERE); + setState(299); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(304); + setState(305); _la = _input.LA(1); if (_la==GROUP) { { - setState(301); - match(GROUP); setState(302); - match(BY); + match(GROUP); setState(303); + match(BY); + setState(304); groupBy(); } } - setState(308); + setState(309); _la = _input.LA(1); if (_la==HAVING) { { - setState(306); - match(HAVING); setState(307); + match(HAVING); + setState(308); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1821,6 +1809,9 @@ class SqlBaseParser extends Parser { public RelationContext relation(int i) { return getRuleContext(RelationContext.class,i); } + public PivotClauseContext pivotClause() { + return getRuleContext(PivotClauseContext.class,0); + } public FromClauseContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1847,26 +1838,35 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(310); - match(FROM); setState(311); + match(FROM); + setState(312); relation(); - setState(316); + setState(317); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(312); - match(T__2); setState(313); + match(T__2); + setState(314); relation(); } } - setState(318); + setState(319); _errHandler.sync(this); _la = _input.LA(1); } + setState(321); + _la = _input.LA(1); + if (_la==PIVOT) { + { + setState(320); + pivotClause(); + } + } + } } catch (RecognitionException re) { @@ -1916,30 +1916,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(320); + setState(324); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(319); + setState(323); setQuantifier(); } } - setState(322); + setState(326); groupingElement(); - setState(327); + setState(331); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(323); + setState(327); match(T__2); - setState(324); + setState(328); groupingElement(); } } - setState(329); + setState(333); _errHandler.sync(this); _la = 
_input.LA(1); } @@ -1994,7 +1994,7 @@ class SqlBaseParser extends Parser { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(330); + setState(334); groupingExpressions(); } } @@ -2040,47 +2040,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(345); + setState(349); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(332); + setState(336); match(T__0); - setState(341); + setState(345); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 
66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(333); + setState(337); expression(); - setState(338); + setState(342); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(334); + setState(338); match(T__2); - setState(335); + setState(339); expression(); } } - setState(340); + setState(344); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(343); + setState(347); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(344); + setState(348); expression(); } break; @@ -2131,15 +2131,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(347); - ((NamedQueryContext)_localctx).name = identifier(); - setState(348); - match(AS); - setState(349); - match(T__0); - setState(350); - queryNoWith(); setState(351); + ((NamedQueryContext)_localctx).name = identifier(); + setState(352); + match(AS); + setState(353); + match(T__0); + setState(354); + queryNoWith(); + setState(355); match(T__1); } } @@ -2183,7 +2183,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(353); + setState(357); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2203,6 +2203,70 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class SelectItemsContext extends ParserRuleContext { + public List selectItem() { + return getRuleContexts(SelectItemContext.class); + } + public SelectItemContext selectItem(int i) { + return getRuleContext(SelectItemContext.class,i); + } + public SelectItemsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_selectItems; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSelectItems(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSelectItems(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitSelectItems(this); + else return visitor.visitChildren(this); + } + } + + public final SelectItemsContext selectItems() throws RecognitionException { + SelectItemsContext _localctx = new SelectItemsContext(_ctx, getState()); + enterRule(_localctx, 30, RULE_selectItems); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(359); + selectItem(); + setState(364); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==T__2) { + { + { + setState(360); + match(T__2); + setState(361); + selectItem(); + } + } + setState(366); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class SelectItemContext extends ParserRuleContext { public SelectItemContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2240,29 +2304,29 @@ class SqlBaseParser extends Parser { public final SelectItemContext 
selectItem() throws RecognitionException { SelectItemContext _localctx = new SelectItemContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_selectItem); + enterRule(_localctx, 32, RULE_selectItem); int _la; try { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(355); + setState(367); expression(); - setState(360); + setState(372); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(357); + setState(369); _la = _input.LA(1); if (_la==AS) { { - setState(356); + setState(368); match(AS); } } - setState(359); + setState(371); identifier(); } break; @@ -2311,24 +2375,24 @@ class SqlBaseParser extends Parser { public final RelationContext relation() throws RecognitionException { RelationContext _localctx = new RelationContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_relation); + enterRule(_localctx, 34, RULE_relation); int _la; try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(374); relationPrimary(); - setState(366); + setState(378); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 41)) & ~0x3f) == 0 && ((1L << (_la - 41)) & ((1L << (FULL - 41)) | (1L << (INNER - 41)) | (1L << (JOIN - 41)) | (1L << (LEFT - 41)) | (1L << (NATURAL - 41)) | (1L << (RIGHT - 41)))) != 0)) { + while (((((_la - 42)) & ~0x3f) == 0 && ((1L << (_la - 42)) & ((1L << (FULL - 42)) | (1L << (INNER - 42)) | (1L << (JOIN - 42)) | (1L << (LEFT - 42)) | (1L << (NATURAL - 42)) | (1L << (RIGHT - 42)))) != 0)) { { { - setState(363); + setState(375); joinRelation(); } } - setState(368); + setState(380); _errHandler.sync(this); _la = _input.LA(1); } @@ -2379,10 +2443,10 @@ class SqlBaseParser extends Parser { public final JoinRelationContext joinRelation() throws RecognitionException { JoinRelationContext _localctx = new JoinRelationContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_joinRelation); + enterRule(_localctx, 36, RULE_joinRelation); int _la; try { - setState(380); + setState(392); switch (_input.LA(1)) { case FULL: case INNER: @@ -2392,18 +2456,18 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(369); + setState(381); joinType(); } - setState(370); + setState(382); match(JOIN); - setState(371); + setState(383); ((JoinRelationContext)_localctx).right = relationPrimary(); - setState(373); + setState(385); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(372); + setState(384); joinCriteria(); } } @@ -2413,13 +2477,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(375); + setState(387); match(NATURAL); - setState(376); + setState(388); joinType(); - setState(377); + setState(389); match(JOIN); - setState(378); + setState(390); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2465,20 +2529,20 @@ class SqlBaseParser extends Parser { public final JoinTypeContext joinType() throws RecognitionException { JoinTypeContext _localctx = new JoinTypeContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_joinType); + enterRule(_localctx, 38, RULE_joinType); int _la; try { - setState(397); + setState(409); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(383); + setState(395); _la = _input.LA(1); if (_la==INNER) { { - setState(382); + setState(394); match(INNER); } } @@ -2488,13 +2552,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 
2); { - setState(385); + setState(397); match(LEFT); - setState(387); + setState(399); _la = _input.LA(1); if (_la==OUTER) { { - setState(386); + setState(398); match(OUTER); } } @@ -2504,13 +2568,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(389); + setState(401); match(RIGHT); - setState(391); + setState(403); _la = _input.LA(1); if (_la==OUTER) { { - setState(390); + setState(402); match(OUTER); } } @@ -2520,13 +2584,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(393); + setState(405); match(FULL); - setState(395); + setState(407); _la = _input.LA(1); if (_la==OUTER) { { - setState(394); + setState(406); match(OUTER); } } @@ -2581,46 +2645,46 @@ class SqlBaseParser extends Parser { public final JoinCriteriaContext joinCriteria() throws RecognitionException { JoinCriteriaContext _localctx = new JoinCriteriaContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_joinCriteria); + enterRule(_localctx, 40, RULE_joinCriteria); int _la; try { - setState(413); + setState(425); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(399); + setState(411); match(ON); - setState(400); + setState(412); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(401); + setState(413); match(USING); - setState(402); + setState(414); match(T__0); - setState(403); + setState(415); identifier(); - setState(408); + setState(420); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(404); + setState(416); match(T__2); - setState(405); + setState(417); identifier(); } } - setState(410); + setState(422); _errHandler.sync(this); _la = _input.LA(1); } - setState(411); + setState(423); match(T__1); } break; @@ -2723,42 +2787,42 @@ class SqlBaseParser extends Parser { public final RelationPrimaryContext relationPrimary() throws RecognitionException { RelationPrimaryContext _localctx = new RelationPrimaryContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_relationPrimary); + enterRule(_localctx, 42, RULE_relationPrimary); int _la; try { - setState(443); + setState(455); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(416); + setState(428); _la = _input.LA(1); if (_la==FROZEN) { { - setState(415); + setState(427); match(FROZEN); } } - setState(418); + setState(430); tableIdentifier(); - setState(423); + setState(435); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(420); + setState(432); _la = _input.LA(1); if (_la==AS) { { - setState(419); + setState(431); match(AS); } } - setState(422); + setState(434); qualifiedName(); } break; @@ -2769,27 +2833,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(425); + setState(437); match(T__0); - setState(426); + setState(438); queryNoWith(); - setState(427); + setState(439); match(T__1); - setState(432); + setState(444); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,61,_ctx) ) { case 1: { - setState(429); + setState(441); _la = _input.LA(1); if (_la==AS) { { - setState(428); + setState(440); match(AS); } } 
- setState(431); + setState(443); qualifiedName(); } break; @@ -2800,27 +2864,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(434); + setState(446); match(T__0); - setState(435); + setState(447); relation(); - setState(436); + setState(448); match(T__1); - setState(441); + setState(453); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) { case 1: { - setState(438); + setState(450); _la = _input.LA(1); if (_la==AS) { { - setState(437); + setState(449); match(AS); } } - setState(440); + setState(452); qualifiedName(); } break; @@ -2840,6 +2904,211 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class PivotClauseContext extends ParserRuleContext { + public PivotArgsContext aggs; + public QualifiedNameContext column; + public PivotArgsContext vals; + public TerminalNode PIVOT() { return getToken(SqlBaseParser.PIVOT, 0); } + public TerminalNode FOR() { return getToken(SqlBaseParser.FOR, 0); } + public TerminalNode IN() { return getToken(SqlBaseParser.IN, 0); } + public List pivotArgs() { + return getRuleContexts(PivotArgsContext.class); + } + public PivotArgsContext pivotArgs(int i) { + return getRuleContext(PivotArgsContext.class,i); + } + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); + } + public PivotClauseContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_pivotClause; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPivotClause(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPivotClause(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitPivotClause(this); + else return visitor.visitChildren(this); + } + } + + public final PivotClauseContext pivotClause() throws RecognitionException { + PivotClauseContext _localctx = new PivotClauseContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_pivotClause); + try { + enterOuterAlt(_localctx, 1); + { + setState(457); + match(PIVOT); + setState(458); + match(T__0); + setState(459); + ((PivotClauseContext)_localctx).aggs = pivotArgs(); + setState(460); + match(FOR); + setState(461); + ((PivotClauseContext)_localctx).column = qualifiedName(); + setState(462); + match(IN); + setState(463); + match(T__0); + setState(464); + ((PivotClauseContext)_localctx).vals = pivotArgs(); + setState(465); + match(T__1); + setState(466); + match(T__1); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class PivotArgsContext extends ParserRuleContext { + public List namedValueExpression() { + return getRuleContexts(NamedValueExpressionContext.class); + } + public NamedValueExpressionContext namedValueExpression(int i) { + return getRuleContext(NamedValueExpressionContext.class,i); + } + public PivotArgsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return 
RULE_pivotArgs; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPivotArgs(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPivotArgs(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitPivotArgs(this); + else return visitor.visitChildren(this); + } + } + + public final PivotArgsContext pivotArgs() throws RecognitionException { + PivotArgsContext _localctx = new PivotArgsContext(_ctx, getState()); + enterRule(_localctx, 46, RULE_pivotArgs); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(468); + namedValueExpression(); + setState(473); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==T__2) { + { + { + setState(469); + match(T__2); + setState(470); + namedValueExpression(); + } + } + setState(475); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class NamedValueExpressionContext extends ParserRuleContext { + public ValueExpressionContext valueExpression() { + return getRuleContext(ValueExpressionContext.class,0); + } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); } + public NamedValueExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_namedValueExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterNamedValueExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitNamedValueExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitNamedValueExpression(this); + else return visitor.visitChildren(this); + } + } + + public final NamedValueExpressionContext namedValueExpression() throws RecognitionException { + NamedValueExpressionContext _localctx = new NamedValueExpressionContext(_ctx, getState()); + enterRule(_localctx, 48, RULE_namedValueExpression); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(476); + valueExpression(0); + setState(481); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L 
<< (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { + { + setState(478); + _la = _input.LA(1); + if (_la==AS) { + { + setState(477); + match(AS); + } + } + + setState(480); + identifier(); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ExpressionContext extends ParserRuleContext { public BooleanExpressionContext booleanExpression() { return getRuleContext(BooleanExpressionContext.class,0); @@ -2865,11 +3134,11 @@ class SqlBaseParser extends Parser { public final ExpressionContext expression() throws RecognitionException { ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_expression); + enterRule(_localctx, 50, RULE_expression); try { enterOuterAlt(_localctx, 1); { - setState(445); + setState(483); booleanExpression(0); } } @@ -3071,24 +3340,24 @@ class SqlBaseParser extends Parser { int _parentState = getState(); BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState); BooleanExpressionContext _prevctx = _localctx; - int _startState = 44; - enterRecursionRule(_localctx, 44, RULE_booleanExpression, _p); + int _startState = 52; + enterRecursionRule(_localctx, 52, RULE_booleanExpression, _p); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(478); + setState(516); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { case 1: { _localctx = new LogicalNotContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(448); + setState(486); match(NOT); - setState(449); + setState(487); booleanExpression(8); } break; @@ -3097,13 +3366,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(450); + setState(488); match(EXISTS); - setState(451); + setState(489); match(T__0); - setState(452); + setState(490); query(); - setState(453); + setState(491); match(T__1); } break; @@ -3112,15 +3381,15 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(455); + setState(493); match(QUERY); - setState(456); + setState(494); match(T__0); - setState(457); + setState(495); ((StringQueryContext)_localctx).queryString = string(); - setState(458); + setState(496); matchQueryOptions(); - setState(459); + setState(497); match(T__1); } break; @@ -3129,19 +3398,19 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(461); + setState(499); match(MATCH); - setState(462); + setState(500); match(T__0); - setState(463); + setState(501); ((MatchQueryContext)_localctx).singleField = qualifiedName(); - setState(464); + setState(502); match(T__2); - setState(465); + setState(503); ((MatchQueryContext)_localctx).queryString = string(); - setState(466); + setState(504); matchQueryOptions(); - setState(467); + setState(505); match(T__1); } break; @@ -3150,19 +3419,19 @@ class SqlBaseParser extends Parser { _localctx = new 
MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(469); + setState(507); match(MATCH); - setState(470); + setState(508); match(T__0); - setState(471); + setState(509); ((MultiMatchQueryContext)_localctx).multiFields = string(); - setState(472); + setState(510); match(T__2); - setState(473); + setState(511); ((MultiMatchQueryContext)_localctx).queryString = string(); - setState(474); + setState(512); matchQueryOptions(); - setState(475); + setState(513); match(T__1); } break; @@ -3171,33 +3440,33 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(477); + setState(515); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(488); + setState(526); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,66,_ctx); + _alt = getInterpreter().adaptivePredict(_input,70,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(486); + setState(524); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,69,_ctx) ) { case 1: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(480); + setState(518); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(481); + setState(519); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(482); + setState(520); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3206,20 +3475,20 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(483); + setState(521); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(484); + setState(522); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(485); + setState(523); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(490); + setState(528); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,66,_ctx); + _alt = getInterpreter().adaptivePredict(_input,70,_ctx); } } } @@ -3262,24 +3531,24 @@ class SqlBaseParser extends Parser { public final MatchQueryOptionsContext matchQueryOptions() throws RecognitionException { MatchQueryOptionsContext _localctx = new MatchQueryOptionsContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_matchQueryOptions); + enterRule(_localctx, 54, RULE_matchQueryOptions); int _la; try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(533); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(491); + setState(529); match(T__2); - setState(492); + setState(530); string(); } } - setState(497); + setState(535); _errHandler.sync(this); _la = _input.LA(1); } @@ -3324,18 +3593,18 @@ class SqlBaseParser extends Parser { public final PredicatedContext predicated() throws RecognitionException { PredicatedContext _localctx = new PredicatedContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_predicated); + 
enterRule(_localctx, 56, RULE_predicated); try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(536); valueExpression(0); - setState(500); + setState(538); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) { case 1: { - setState(499); + setState(537); predicate(); } break; @@ -3402,145 +3671,145 @@ class SqlBaseParser extends Parser { public final PredicateContext predicate() throws RecognitionException { PredicateContext _localctx = new PredicateContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_predicate); + enterRule(_localctx, 58, RULE_predicate); int _la; try { - setState(548); + setState(586); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(503); + setState(541); _la = _input.LA(1); if (_la==NOT) { { - setState(502); + setState(540); match(NOT); } } - setState(505); + setState(543); ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(506); + setState(544); ((PredicateContext)_localctx).lower = valueExpression(0); - setState(507); + setState(545); match(AND); - setState(508); + setState(546); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(511); + setState(549); _la = _input.LA(1); if (_la==NOT) { { - setState(510); + setState(548); match(NOT); } } - setState(513); + setState(551); ((PredicateContext)_localctx).kind = match(IN); - setState(514); + setState(552); match(T__0); - setState(515); + setState(553); valueExpression(0); - setState(520); + setState(558); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(516); + setState(554); match(T__2); - setState(517); + setState(555); valueExpression(0); } } - setState(522); + setState(560); _errHandler.sync(this); _la = _input.LA(1); } - setState(523); + setState(561); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(526); + setState(564); _la = _input.LA(1); if (_la==NOT) { { - setState(525); + setState(563); match(NOT); } } - setState(528); + setState(566); ((PredicateContext)_localctx).kind = match(IN); - setState(529); + setState(567); match(T__0); - setState(530); + setState(568); query(); - setState(531); + setState(569); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(534); + setState(572); _la = _input.LA(1); if (_la==NOT) { { - setState(533); + setState(571); match(NOT); } } - setState(536); + setState(574); ((PredicateContext)_localctx).kind = match(LIKE); - setState(537); + setState(575); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(539); + setState(577); _la = _input.LA(1); if (_la==NOT) { { - setState(538); + setState(576); match(NOT); } } - setState(541); + setState(579); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(542); + setState(580); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(543); + setState(581); match(IS); - setState(545); + setState(583); _la = _input.LA(1); if (_la==NOT) { { - setState(544); + setState(582); match(NOT); } } - setState(547); + setState(585); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3583,13 +3852,13 @@ class SqlBaseParser extends Parser { public final LikePatternContext likePattern() throws RecognitionException { LikePatternContext _localctx = new 
LikePatternContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_likePattern); + enterRule(_localctx, 60, RULE_likePattern); try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(588); match(LIKE); - setState(551); + setState(589); pattern(); } } @@ -3633,18 +3902,18 @@ class SqlBaseParser extends Parser { public final PatternContext pattern() throws RecognitionException { PatternContext _localctx = new PatternContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_pattern); + enterRule(_localctx, 62, RULE_pattern); try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(591); ((PatternContext)_localctx).value = string(); - setState(555); + setState(593); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,81,_ctx) ) { case 1: { - setState(554); + setState(592); patternEscape(); } break; @@ -3690,27 +3959,27 @@ class SqlBaseParser extends Parser { public final PatternEscapeContext patternEscape() throws RecognitionException { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_patternEscape); + enterRule(_localctx, 64, RULE_patternEscape); try { - setState(563); + setState(601); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(557); + setState(595); match(ESCAPE); - setState(558); + setState(596); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(559); + setState(597); match(ESCAPE_ESC); - setState(560); + setState(598); ((PatternEscapeContext)_localctx).escape = string(); - setState(561); + setState(599); match(ESC_END); } break; @@ -3848,14 +4117,14 @@ class SqlBaseParser extends Parser { int _parentState = getState(); ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, _parentState); ValueExpressionContext _prevctx = _localctx; - int _startState = 58; - enterRecursionRule(_localctx, 58, RULE_valueExpression, _p); + int _startState = 66; + enterRecursionRule(_localctx, 66, RULE_valueExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(569); + setState(607); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3891,6 +4160,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RIGHT: case RLIKE: @@ -3925,7 +4195,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(566); + setState(604); primaryExpression(0); } break; @@ -3935,7 +4205,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(567); + setState(605); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3943,7 +4213,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(568); + setState(606); valueExpression(4); } break; @@ -3951,33 +4221,33 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(583); + setState(621); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,81,_ctx); + _alt = getInterpreter().adaptivePredict(_input,85,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(581); + setState(619); _errHandler.sync(this); 
- switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,84,_ctx) ) { case 1: { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(571); + setState(609); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(572); + setState(610); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 117)) & ~0x3f) == 0 && ((1L << (_la - 117)) & ((1L << (ASTERISK - 117)) | (1L << (SLASH - 117)) | (1L << (PERCENT - 117)))) != 0)) ) { + if ( !(((((_la - 119)) & ~0x3f) == 0 && ((1L << (_la - 119)) & ((1L << (ASTERISK - 119)) | (1L << (SLASH - 119)) | (1L << (PERCENT - 119)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(573); + setState(611); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3986,9 +4256,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(574); + setState(612); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(575); + setState(613); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3996,7 +4266,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(576); + setState(614); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -4005,20 +4275,20 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(577); + setState(615); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(578); + setState(616); comparisonOperator(); - setState(579); + setState(617); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(585); + setState(623); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,81,_ctx); + _alt = getInterpreter().adaptivePredict(_input,85,_ctx); } } } @@ -4283,23 +4553,23 @@ class SqlBaseParser extends Parser { int _parentState = getState(); PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); PrimaryExpressionContext _prevctx = _localctx; - int _startState = 60; - enterRecursionRule(_localctx, 60, RULE_primaryExpression, _p); + int _startState = 68; + enterRecursionRule(_localctx, 68, RULE_primaryExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(622); + setState(660); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,86,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) { case 1: { _localctx = new CastContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(587); + setState(625); castExpression(); } break; @@ -4308,7 +4578,7 @@ class SqlBaseParser extends Parser { _localctx = new ExtractContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - 
setState(588); + setState(626); extractExpression(); } break; @@ -4317,7 +4587,7 @@ class SqlBaseParser extends Parser { _localctx = new CurrentDateTimeFunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(589); + setState(627); builtinDateTimeFunction(); } break; @@ -4326,7 +4596,7 @@ class SqlBaseParser extends Parser { _localctx = new ConstantDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(590); + setState(628); constant(); } break; @@ -4335,18 +4605,18 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(594); + setState(632); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { { - setState(591); + setState(629); qualifiedName(); - setState(592); + setState(630); match(DOT); } } - setState(596); + setState(634); match(ASTERISK); } break; @@ -4355,7 +4625,7 @@ class SqlBaseParser extends Parser { _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(597); + setState(635); functionExpression(); } break; @@ -4364,11 +4634,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(598); + setState(636); match(T__0); - setState(599); + setState(637); query(); - setState(600); + setState(638); match(T__1); } break; @@ -4377,7 +4647,7 @@ class SqlBaseParser extends Parser { _localctx = 
new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(602); + setState(640); qualifiedName(); } break; @@ -4386,11 +4656,11 @@ class SqlBaseParser extends Parser { _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(603); + setState(641); match(T__0); - setState(604); + setState(642); expression(); - setState(605); + setState(643); match(T__1); } break; @@ -4399,51 +4669,51 @@ class SqlBaseParser extends Parser { _localctx = new CaseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(607); + setState(645); match(CASE); - setState(609); + setState(647); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << 
(GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(608); + setState(646); ((CaseContext)_localctx).operand = booleanExpression(0); } } - setState(612); + setState(650); _errHandler.sync(this); _la = _input.LA(1); do { { { - setState(611); + setState(649); whenClause(); } } - setState(614); + setState(652); _errHandler.sync(this); _la = _input.LA(1); } while ( _la==WHEN ); - setState(618); + setState(656); _la = _input.LA(1); if (_la==ELSE) { { - setState(616); + setState(654); match(ELSE); - setState(617); + setState(655); ((CaseContext)_localctx).elseClause = booleanExpression(0); } } - setState(620); + setState(658); match(END); } break; } _ctx.stop = _input.LT(-1); - setState(629); + setState(667); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,87,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); @@ -4452,18 +4722,18 @@ class SqlBaseParser extends Parser { { _localctx = new CastOperatorExpressionContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(624); + setState(662); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(625); + setState(663); match(CAST_OP); - setState(626); + setState(664); dataType(); } } } - setState(631); + setState(669); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,87,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); } } } @@ -4504,28 +4774,28 @@ class SqlBaseParser extends Parser { public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws RecognitionException { BuiltinDateTimeFunctionContext _localctx = new BuiltinDateTimeFunctionContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_builtinDateTimeFunction); + enterRule(_localctx, 70, RULE_builtinDateTimeFunction); try { - setState(635); + setState(673); switch (_input.LA(1)) { case CURRENT_TIMESTAMP: enterOuterAlt(_localctx, 1); { - setState(632); + setState(670); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); } break; case CURRENT_DATE: enterOuterAlt(_localctx, 2); { - setState(633); + setState(671); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_DATE); } break; case CURRENT_TIME: enterOuterAlt(_localctx, 3); { - setState(634); + setState(672); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIME); } break; @@ -4574,44 +4844,44 @@ class SqlBaseParser extends Parser { public final CastExpressionContext castExpression() throws RecognitionException { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_castExpression); + enterRule(_localctx, 72, RULE_castExpression); try { - setState(647); + setState(685); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,93,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(637); + setState(675); 
castTemplate(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(638); + setState(676); match(FUNCTION_ESC); - setState(639); + setState(677); castTemplate(); - setState(640); + setState(678); match(ESC_END); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(642); + setState(680); convertTemplate(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(643); + setState(681); match(FUNCTION_ESC); - setState(644); + setState(682); convertTemplate(); - setState(645); + setState(683); match(ESC_END); } break; @@ -4658,21 +4928,21 @@ class SqlBaseParser extends Parser { public final CastTemplateContext castTemplate() throws RecognitionException { CastTemplateContext _localctx = new CastTemplateContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_castTemplate); + enterRule(_localctx, 74, RULE_castTemplate); try { enterOuterAlt(_localctx, 1); { - setState(649); + setState(687); match(CAST); - setState(650); + setState(688); match(T__0); - setState(651); + setState(689); expression(); - setState(652); + setState(690); match(AS); - setState(653); + setState(691); dataType(); - setState(654); + setState(692); match(T__1); } } @@ -4716,21 +4986,21 @@ class SqlBaseParser extends Parser { public final ConvertTemplateContext convertTemplate() throws RecognitionException { ConvertTemplateContext _localctx = new ConvertTemplateContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_convertTemplate); + enterRule(_localctx, 76, RULE_convertTemplate); try { enterOuterAlt(_localctx, 1); { - setState(656); + setState(694); match(CONVERT); - setState(657); + setState(695); match(T__0); - setState(658); + setState(696); expression(); - setState(659); + setState(697); match(T__2); - setState(660); + setState(698); dataType(); - setState(661); + setState(699); match(T__1); } } @@ -4772,25 +5042,25 @@ class SqlBaseParser extends Parser { public final ExtractExpressionContext extractExpression() throws RecognitionException { ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_extractExpression); + enterRule(_localctx, 78, RULE_extractExpression); try { - setState(668); + setState(706); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(663); + setState(701); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(664); + setState(702); match(FUNCTION_ESC); - setState(665); + setState(703); extractTemplate(); - setState(666); + setState(704); match(ESC_END); } break; @@ -4840,21 +5110,21 @@ class SqlBaseParser extends Parser { public final ExtractTemplateContext extractTemplate() throws RecognitionException { ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_extractTemplate); + enterRule(_localctx, 80, RULE_extractTemplate); try { enterOuterAlt(_localctx, 1); { - setState(670); + setState(708); match(EXTRACT); - setState(671); + setState(709); match(T__0); - setState(672); + setState(710); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(673); + setState(711); match(FROM); - setState(674); + setState(712); valueExpression(0); - setState(675); + setState(713); match(T__1); } } @@ -4895,9 +5165,9 @@ class SqlBaseParser extends Parser { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_functionExpression); + 
enterRule(_localctx, 82, RULE_functionExpression); try { - setState(682); + setState(720); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4926,6 +5196,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RIGHT: case RLIKE: @@ -4946,18 +5217,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(677); + setState(715); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(678); + setState(716); match(FUNCTION_ESC); - setState(679); + setState(717); functionTemplate(); - setState(680); + setState(718); match(ESC_END); } break; @@ -5010,50 +5281,50 @@ class SqlBaseParser extends Parser { public final FunctionTemplateContext functionTemplate() throws RecognitionException { FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_functionTemplate); + enterRule(_localctx, 84, RULE_functionTemplate); int _la; try { enterOuterAlt(_localctx, 1); { - setState(684); + setState(722); functionName(); - setState(685); + setState(723); match(T__0); - setState(697); + setState(735); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L 
<< (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(687); + setState(725); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(686); + setState(724); setQuantifier(); } } - setState(689); + setState(727); expression(); - setState(694); + setState(732); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(690); + setState(728); match(T__2); - setState(691); + setState(729); expression(); } } - setState(696); + setState(734); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(699); + setState(737); match(T__1); } } @@ -5095,21 +5366,21 @@ class SqlBaseParser extends Parser { public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_functionName); + enterRule(_localctx, 86, RULE_functionName); try { - setState(704); + setState(742); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(701); + setState(739); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(702); + setState(740); match(RIGHT); } break; @@ -5139,6 +5410,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -5158,7 +5430,7 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(703); + setState(741); identifier(); } break; @@ -5386,16 +5658,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_constant); + enterRule(_localctx, 88, RULE_constant); try { int _alt; - setState(732); + setState(770); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(706); + setState(744); match(NULL); } break; @@ -5403,7 +5675,7 @@ class SqlBaseParser extends Parser { _localctx = new IntervalLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(707); + setState(745); interval(); } break; @@ -5412,7 +5684,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(708); + setState(746); number(); } break; @@ -5421,7 +5693,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(709); + setState(747); booleanValue(); } break; @@ -5429,7 +5701,7 @@ 
class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(711); + setState(749); _errHandler.sync(this); _alt = 1; do { @@ -5437,7 +5709,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(710); + setState(748); match(STRING); } } @@ -5445,9 +5717,9 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(713); + setState(751); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,96,_ctx); + _alt = getInterpreter().adaptivePredict(_input,100,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -5455,7 +5727,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(715); + setState(753); match(PARAM); } break; @@ -5463,11 +5735,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(716); + setState(754); match(DATE_ESC); - setState(717); + setState(755); string(); - setState(718); + setState(756); match(ESC_END); } break; @@ -5475,11 +5747,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(720); + setState(758); match(TIME_ESC); - setState(721); + setState(759); string(); - setState(722); + setState(760); match(ESC_END); } break; @@ -5487,11 +5759,11 @@ class SqlBaseParser extends Parser { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(724); + setState(762); match(TIMESTAMP_ESC); - setState(725); + setState(763); string(); - setState(726); + setState(764); match(ESC_END); } break; @@ -5499,11 +5771,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(728); + setState(766); match(GUID_ESC); - setState(729); + setState(767); string(); - setState(730); + setState(768); match(ESC_END); } break; @@ -5551,14 +5823,14 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_comparisonOperator); + enterRule(_localctx, 90, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(734); + setState(772); _la = _input.LA(1); - if ( !(((((_la - 108)) & ~0x3f) == 0 && ((1L << (_la - 108)) & ((1L << (EQ - 108)) | (1L << (NULLEQ - 108)) | (1L << (NEQ - 108)) | (1L << (LT - 108)) | (1L << (LTE - 108)) | (1L << (GT - 108)) | (1L << (GTE - 108)))) != 0)) ) { + if ( !(((((_la - 110)) & ~0x3f) == 0 && ((1L << (_la - 110)) & ((1L << (EQ - 110)) | (1L << (NULLEQ - 110)) | (1L << (NEQ - 110)) | (1L << (LT - 110)) | (1L << (LTE - 110)) | (1L << (GT - 110)) | (1L << (GTE - 110)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5600,12 +5872,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_booleanValue); + enterRule(_localctx, 92, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(736); + setState(774); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5668,18 +5940,18 @@ 
class SqlBaseParser extends Parser { public final IntervalContext interval() throws RecognitionException { IntervalContext _localctx = new IntervalContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_interval); + enterRule(_localctx, 94, RULE_interval); int _la; try { enterOuterAlt(_localctx, 1); { - setState(738); + setState(776); match(INTERVAL); - setState(740); + setState(778); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(739); + setState(777); ((IntervalContext)_localctx).sign = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5690,35 +5962,35 @@ class SqlBaseParser extends Parser { } } - setState(744); + setState(782); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: { - setState(742); + setState(780); ((IntervalContext)_localctx).valueNumeric = number(); } break; case PARAM: case STRING: { - setState(743); + setState(781); ((IntervalContext)_localctx).valuePattern = string(); } break; default: throw new NoViableAltException(this); } - setState(746); + setState(784); ((IntervalContext)_localctx).leading = intervalField(); - setState(749); + setState(787); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,104,_ctx) ) { case 1: { - setState(747); + setState(785); match(TO); - setState(748); + setState(786); ((IntervalContext)_localctx).trailing = intervalField(); } break; @@ -5770,14 +6042,14 @@ class SqlBaseParser extends Parser { public final IntervalFieldContext intervalField() throws RecognitionException { IntervalFieldContext _localctx = new IntervalFieldContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_intervalField); + enterRule(_localctx, 96, RULE_intervalField); int _la; try { enterOuterAlt(_localctx, 1); { - setState(751); + setState(789); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (SECOND - 80)) | (1L << (SECONDS - 80)) | (1L << (YEAR - 80)) | (1L << (YEARS - 80)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (MONTHS - 64)) | (1L << (SECOND - 64)) | (1L << (SECONDS - 64)) | (1L << (YEAR - 64)) | (1L << (YEARS - 64)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5828,12 +6100,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_dataType); + enterRule(_localctx, 98, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(753); + setState(791); identifier(); } } @@ -5880,30 +6152,30 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_qualifiedName); + enterRule(_localctx, 100, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(760); + setState(798); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,101,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,105,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(755); + setState(793); identifier(); - setState(756); + setState(794); match(DOT); } } } - setState(762); + setState(800); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,101,_ctx); + _alt = getInterpreter().adaptivePredict(_input,105,_ctx); } - setState(763); + setState(801); identifier(); } } @@ -5946,15 +6218,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_identifier); + enterRule(_localctx, 102, RULE_identifier); try { - setState(767); + setState(805); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(765); + setState(803); quoteIdentifier(); } break; @@ -5984,6 +6256,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -6001,7 +6274,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(766); + setState(804); unquoteIdentifier(); } break; @@ -6051,46 +6324,46 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_tableIdentifier); + enterRule(_localctx, 104, RULE_tableIdentifier); int _la; try { - setState(781); + setState(819); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,105,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,109,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(772); + setState(810); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << 
(PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { { - setState(769); + setState(807); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(770); + setState(808); match(T__3); } } - setState(774); + setState(812); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(778); + setState(816); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,104,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,108,_ctx) ) { case 1: { - setState(775); + setState(813); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(776); + setState(814); match(T__3); } break; } - setState(780); + setState(818); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -6155,15 +6428,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_quoteIdentifier); + enterRule(_localctx, 106, RULE_quoteIdentifier); try { - setState(785); + setState(823); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(783); + setState(821); match(QUOTED_IDENTIFIER); } break; @@ -6171,7 +6444,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(784); + setState(822); match(BACKQUOTED_IDENTIFIER); } break; @@ -6241,15 +6514,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_unquoteIdentifier); + enterRule(_localctx, 108, RULE_unquoteIdentifier); try { - setState(790); + setState(828); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(787); + setState(825); match(IDENTIFIER); } break; @@ -6279,6 +6552,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -6295,7 +6569,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(788); + setState(826); nonReserved(); } break; @@ -6303,7 +6577,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(789); + setState(827); match(DIGIT_IDENTIFIER); } break; @@ -6370,15 +6644,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_number); + enterRule(_localctx, 110, RULE_number); try { - setState(794); + setState(832); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); 
enterOuterAlt(_localctx, 1); { - setState(792); + setState(830); match(DECIMAL_VALUE); } break; @@ -6386,7 +6660,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(793); + setState(831); match(INTEGER_VALUE); } break; @@ -6429,12 +6703,12 @@ class SqlBaseParser extends Parser { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_string); + enterRule(_localctx, 112, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(796); + setState(834); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -6486,17 +6760,17 @@ class SqlBaseParser extends Parser { public final WhenClauseContext whenClause() throws RecognitionException { WhenClauseContext _localctx = new WhenClauseContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_whenClause); + enterRule(_localctx, 114, RULE_whenClause); try { enterOuterAlt(_localctx, 1); { - setState(798); + setState(836); match(WHEN); - setState(799); + setState(837); ((WhenClauseContext)_localctx).condition = expression(); - setState(800); + setState(838); match(THEN); - setState(801); + setState(839); ((WhenClauseContext)_localctx).result = expression(); } } @@ -6538,6 +6812,7 @@ class SqlBaseParser extends Parser { public TerminalNode OPTIMIZED() { return getToken(SqlBaseParser.OPTIMIZED, 0); } public TerminalNode PARSED() { return getToken(SqlBaseParser.PARSED, 0); } public TerminalNode PHYSICAL() { return getToken(SqlBaseParser.PHYSICAL, 0); } + public TerminalNode PIVOT() { return getToken(SqlBaseParser.PIVOT, 0); } public TerminalNode PLAN() { return getToken(SqlBaseParser.PLAN, 0); } public TerminalNode QUERY() { return getToken(SqlBaseParser.QUERY, 0); } public TerminalNode RLIKE() { return getToken(SqlBaseParser.RLIKE, 0); } @@ -6572,14 +6847,14 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_nonReserved); + enterRule(_localctx, 116, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(803); + setState(841); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L 
<< FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -6599,11 +6874,11 @@ class SqlBaseParser extends Parser { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 22: + case 26: return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); - case 29: + case 33: return valueExpression_sempred((ValueExpressionContext)_localctx, predIndex); - case 30: + case 34: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; @@ -6637,328 +6912,341 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008a\u0328\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008c\u034e\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ - "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0080\n\4\f\4\16\4\u0083\13\4\3\4\5"+ - "\4\u0086\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u008f\n\4\f\4\16\4\u0092"+ - "\13\4\3\4\5\4\u0095\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009c\n\4\3\4\3\4\5\4"+ - "\u00a0\n\4\3\4\3\4\3\4\3\4\5\4\u00a6\n\4\3\4\3\4\3\4\5\4\u00ab\n\4\3\4"+ - "\3\4\3\4\5\4\u00b0\n\4\3\4\3\4\5\4\u00b4\n\4\3\4\3\4\3\4\5\4\u00b9\n\4"+ - "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c1\n\4\3\4\3\4\5\4\u00c5\n\4\3\4\3\4\3"+ - "\4\3\4\7\4\u00cb\n\4\f\4\16\4\u00ce\13\4\5\4\u00d0\n\4\3\4\3\4\3\4\3\4"+ - "\5\4\u00d6\n\4\3\4\3\4\3\4\5\4\u00db\n\4\3\4\5\4\u00de\n\4\3\4\3\4\3\4"+ - "\5\4\u00e3\n\4\3\4\5\4\u00e6\n\4\5\4\u00e8\n\4\3\5\3\5\3\5\3\5\7\5\u00ee"+ - "\n\5\f\5\16\5\u00f1\13\5\5\5\u00f3\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6"+ - "\7\6\u00fd\n\6\f\6\16\6\u0100\13\6\5\6\u0102\n\6\3\6\5\6\u0105\n\6\3\7"+ - "\3\7\3\7\3\7\3\7\5\7\u010c\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u0113\n\b\3\t\3"+ - "\t\5\t\u0117\n\t\3\t\3\t\5\t\u011b\n\t\3\n\3\n\5\n\u011f\n\n\3\n\3\n\3"+ - "\n\7\n\u0124\n\n\f\n\16\n\u0127\13\n\3\n\5\n\u012a\n\n\3\n\3\n\5\n\u012e"+ - "\n\n\3\n\3\n\3\n\5\n\u0133\n\n\3\n\3\n\5\n\u0137\n\n\3\13\3\13\3\13\3"+ - "\13\7\13\u013d\n\13\f\13\16\13\u0140\13\13\3\f\5\f\u0143\n\f\3\f\3\f\3"+ - "\f\7\f\u0148\n\f\f\f\16\f\u014b\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ - "\u0153\n\16\f\16\16\16\u0156\13\16\5\16\u0158\n\16\3\16\3\16\5\16\u015c"+ - "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0168\n\21"+ - "\3\21\5\21\u016b\n\21\3\22\3\22\7\22\u016f\n\22\f\22\16\22\u0172\13\22"+ - 
"\3\23\3\23\3\23\3\23\5\23\u0178\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u017f"+ - "\n\23\3\24\5\24\u0182\n\24\3\24\3\24\5\24\u0186\n\24\3\24\3\24\5\24\u018a"+ - "\n\24\3\24\3\24\5\24\u018e\n\24\5\24\u0190\n\24\3\25\3\25\3\25\3\25\3"+ - "\25\3\25\3\25\7\25\u0199\n\25\f\25\16\25\u019c\13\25\3\25\3\25\5\25\u01a0"+ - "\n\25\3\26\5\26\u01a3\n\26\3\26\3\26\5\26\u01a7\n\26\3\26\5\26\u01aa\n"+ - "\26\3\26\3\26\3\26\3\26\5\26\u01b0\n\26\3\26\5\26\u01b3\n\26\3\26\3\26"+ - "\3\26\3\26\5\26\u01b9\n\26\3\26\5\26\u01bc\n\26\5\26\u01be\n\26\3\27\3"+ - "\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\5\30\u01e1\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30"+ - "\u01e9\n\30\f\30\16\30\u01ec\13\30\3\31\3\31\7\31\u01f0\n\31\f\31\16\31"+ - "\u01f3\13\31\3\32\3\32\5\32\u01f7\n\32\3\33\5\33\u01fa\n\33\3\33\3\33"+ - "\3\33\3\33\3\33\3\33\5\33\u0202\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u0209"+ - "\n\33\f\33\16\33\u020c\13\33\3\33\3\33\3\33\5\33\u0211\n\33\3\33\3\33"+ - "\3\33\3\33\3\33\3\33\5\33\u0219\n\33\3\33\3\33\3\33\5\33\u021e\n\33\3"+ - "\33\3\33\3\33\3\33\5\33\u0224\n\33\3\33\5\33\u0227\n\33\3\34\3\34\3\34"+ - "\3\35\3\35\5\35\u022e\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0236\n"+ - "\36\3\37\3\37\3\37\3\37\5\37\u023c\n\37\3\37\3\37\3\37\3\37\3\37\3\37"+ - "\3\37\3\37\3\37\3\37\7\37\u0248\n\37\f\37\16\37\u024b\13\37\3 \3 \3 \3"+ - " \3 \3 \3 \3 \5 \u0255\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0264"+ - "\n \3 \6 \u0267\n \r \16 \u0268\3 \3 \5 \u026d\n \3 \3 \5 \u0271\n \3"+ - " \3 \3 \7 \u0276\n \f \16 \u0279\13 \3!\3!\3!\5!\u027e\n!\3\"\3\"\3\""+ - "\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u028a\n\"\3#\3#\3#\3#\3#\3#\3#\3$\3$"+ - "\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u029f\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3"+ - "\'\3\'\3\'\3\'\5\'\u02ad\n\'\3(\3(\3(\5(\u02b2\n(\3(\3(\3(\7(\u02b7\n"+ - "(\f(\16(\u02ba\13(\5(\u02bc\n(\3(\3(\3)\3)\3)\5)\u02c3\n)\3*\3*\3*\3*"+ - "\3*\6*\u02ca\n*\r*\16*\u02cb\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3"+ - "*\3*\3*\3*\5*\u02df\n*\3+\3+\3,\3,\3-\3-\5-\u02e7\n-\3-\3-\5-\u02eb\n"+ - "-\3-\3-\3-\5-\u02f0\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02f9\n\60\f\60"+ - "\16\60\u02fc\13\60\3\60\3\60\3\61\3\61\5\61\u0302\n\61\3\62\3\62\3\62"+ - "\5\62\u0307\n\62\3\62\3\62\3\62\3\62\5\62\u030d\n\62\3\62\5\62\u0310\n"+ - "\62\3\63\3\63\5\63\u0314\n\63\3\64\3\64\3\64\5\64\u0319\n\64\3\65\3\65"+ - "\5\65\u031d\n\65\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\2\5.<>9\2"+ - "\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJL"+ - "NPRTVXZ\\^`bdfhjln\2\22\b\2\7\7\t\t\"\"<ARSde\3\2}~\30\2\b\t\23\24"+ - "\26\31\33\33\"\"$$\'(+-\60\60\65\6588;<>>@@GGKMORUVXY]^``dd\u038b\2p\3"+ - "\2\2\2\4s\3\2\2\2\6\u00e7\3\2\2\2\b\u00f2\3\2\2\2\n\u00f6\3\2\2\2\f\u010b"+ - "\3\2\2\2\16\u0112\3\2\2\2\20\u0114\3\2\2\2\22\u011c\3\2\2\2\24\u0138\3"+ - "\2\2\2\26\u0142\3\2\2\2\30\u014c\3\2\2\2\32\u015b\3\2\2\2\34\u015d\3\2"+ - "\2\2\36\u0163\3\2\2\2 \u0165\3\2\2\2\"\u016c\3\2\2\2$\u017e\3\2\2\2&\u018f"+ - "\3\2\2\2(\u019f\3\2\2\2*\u01bd\3\2\2\2,\u01bf\3\2\2\2.\u01e0\3\2\2\2\60"+ - "\u01f1\3\2\2\2\62\u01f4\3\2\2\2\64\u0226\3\2\2\2\66\u0228\3\2\2\28\u022b"+ - "\3\2\2\2:\u0235\3\2\2\2<\u023b\3\2\2\2>\u0270\3\2\2\2@\u027d\3\2\2\2B"+ - "\u0289\3\2\2\2D\u028b\3\2\2\2F\u0292\3\2\2\2H\u029e\3\2\2\2J\u02a0\3\2"+ - "\2\2L\u02ac\3\2\2\2N\u02ae\3\2\2\2P\u02c2\3\2\2\2R\u02de\3\2\2\2T\u02e0"+ - "\3\2\2\2V\u02e2\3\2\2\2X\u02e4\3\2\2\2Z\u02f1\3\2\2\2\\\u02f3\3\2\2\2"+ - 
"^\u02fa\3\2\2\2`\u0301\3\2\2\2b\u030f\3\2\2\2d\u0313\3\2\2\2f\u0318\3"+ - "\2\2\2h\u031c\3\2\2\2j\u031e\3\2\2\2l\u0320\3\2\2\2n\u0325\3\2\2\2pq\5"+ - "\6\4\2qr\7\2\2\3r\3\3\2\2\2st\5,\27\2tu\7\2\2\3u\5\3\2\2\2v\u00e8\5\b"+ - "\5\2w\u0085\7$\2\2x\u0081\7\3\2\2yz\7M\2\2z\u0080\t\2\2\2{|\7(\2\2|\u0080"+ - "\t\3\2\2}~\7`\2\2~\u0080\5V,\2\177y\3\2\2\2\177{\3\2\2\2\177}\3\2\2\2"+ - "\u0080\u0083\3\2\2\2\u0081\177\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0084"+ - "\3\2\2\2\u0083\u0081\3\2\2\2\u0084\u0086\7\4\2\2\u0085x\3\2\2\2\u0085"+ - "\u0086\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u00e8\5\6\4\2\u0088\u0094\7\33"+ - "\2\2\u0089\u0090\7\3\2\2\u008a\u008b\7M\2\2\u008b\u008f\t\4\2\2\u008c"+ - "\u008d\7(\2\2\u008d\u008f\t\3\2\2\u008e\u008a\3\2\2\2\u008e\u008c\3\2"+ - "\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091"+ - "\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093\u0095\7\4\2\2\u0094\u0089\3\2"+ - "\2\2\u0094\u0095\3\2\2\2\u0095\u0096\3\2\2\2\u0096\u00e8\5\6\4\2\u0097"+ - "\u0098\7U\2\2\u0098\u009b\7X\2\2\u0099\u009a\7\63\2\2\u009a\u009c\7*\2"+ - "\2\u009b\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u00a0"+ - "\5\66\34\2\u009e\u00a0\5b\62\2\u009f\u009d\3\2\2\2\u009f\u009e\3\2\2\2"+ - "\u009f\u00a0\3\2\2\2\u00a0\u00e8\3\2\2\2\u00a1\u00a2\7U\2\2\u00a2\u00a5"+ - "\7\24\2\2\u00a3\u00a4\7\63\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a3\3\2\2\2"+ - "\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00aa\t\5\2\2\u00a8\u00ab"+ - "\5\66\34\2\u00a9\u00ab\5b\62\2\u00aa\u00a8\3\2\2\2\u00aa\u00a9\3\2\2\2"+ - "\u00ab\u00e8\3\2\2\2\u00ac\u00af\t\6\2\2\u00ad\u00ae\7\63\2\2\u00ae\u00b0"+ - "\7*\2\2\u00af\u00ad\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b3\3\2\2\2\u00b1"+ - "\u00b4\5\66\34\2\u00b2\u00b4\5b\62\2\u00b3\u00b1\3\2\2\2\u00b3\u00b2\3"+ - "\2\2\2\u00b4\u00e8\3\2\2\2\u00b5\u00b6\7U\2\2\u00b6\u00b8\7,\2\2\u00b7"+ - "\u00b9\5\66\34\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00e8\3"+ - "\2\2\2\u00ba\u00bb\7U\2\2\u00bb\u00e8\7Q\2\2\u00bc\u00bd\7V\2\2\u00bd"+ - "\u00c0\7X\2\2\u00be\u00bf\7\22\2\2\u00bf\u00c1\5\66\34\2\u00c0\u00be\3"+ - "\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c5\5\66\34\2\u00c3"+ - "\u00c5\5b\62\2\u00c4\u00c2\3\2\2\2\u00c4\u00c3\3\2\2\2\u00c4\u00c5\3\2"+ - "\2\2\u00c5\u00cf\3\2\2\2\u00c6\u00c7\7]\2\2\u00c7\u00cc\5j\66\2\u00c8"+ - "\u00c9\7\5\2\2\u00c9\u00cb\5j\66\2\u00ca\u00c8\3\2\2\2\u00cb\u00ce\3\2"+ - "\2\2\u00cc\u00ca\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d0\3\2\2\2\u00ce"+ - "\u00cc\3\2\2\2\u00cf\u00c6\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0\u00e8\3\2"+ - "\2\2\u00d1\u00d2\7V\2\2\u00d2\u00d5\7\24\2\2\u00d3\u00d4\7\22\2\2\u00d4"+ - "\u00d6\5j\66\2\u00d5\u00d3\3\2\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00da\3\2"+ - "\2\2\u00d7\u00d8\7W\2\2\u00d8\u00db\5\66\34\2\u00d9\u00db\5b\62\2\u00da"+ - "\u00d7\3\2\2\2\u00da\u00d9\3\2\2\2\u00da\u00db\3\2\2\2\u00db\u00dd\3\2"+ - "\2\2\u00dc\u00de\5\66\34\2\u00dd\u00dc\3\2\2\2\u00dd\u00de\3\2\2\2\u00de"+ - "\u00e8\3\2\2\2\u00df\u00e0\7V\2\2\u00e0\u00e5\7^\2\2\u00e1\u00e3\t\7\2"+ - "\2\u00e2\u00e1\3\2\2\2\u00e2\u00e3\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4\u00e6"+ - "\5h\65\2\u00e5\u00e2\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2\u00e7"+ - "v\3\2\2\2\u00e7w\3\2\2\2\u00e7\u0088\3\2\2\2\u00e7\u0097\3\2\2\2\u00e7"+ - "\u00a1\3\2\2\2\u00e7\u00ac\3\2\2\2\u00e7\u00b5\3\2\2\2\u00e7\u00ba\3\2"+ - "\2\2\u00e7\u00bc\3\2\2\2\u00e7\u00d1\3\2\2\2\u00e7\u00df\3\2\2\2\u00e8"+ - "\7\3\2\2\2\u00e9\u00ea\7c\2\2\u00ea\u00ef\5\34\17\2\u00eb\u00ec\7\5\2"+ - "\2\u00ec\u00ee\5\34\17\2\u00ed\u00eb\3\2\2\2\u00ee\u00f1\3\2\2\2\u00ef"+ - 
"\u00ed\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0\u00f3\3\2\2\2\u00f1\u00ef\3\2"+ - "\2\2\u00f2\u00e9\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4"+ - "\u00f5\5\n\6\2\u00f5\t\3\2\2\2\u00f6\u0101\5\16\b\2\u00f7\u00f8\7I\2\2"+ - "\u00f8\u00f9\7\17\2\2\u00f9\u00fe\5\20\t\2\u00fa\u00fb\7\5\2\2\u00fb\u00fd"+ - "\5\20\t\2\u00fc\u00fa\3\2\2\2\u00fd\u0100\3\2\2\2\u00fe\u00fc\3\2\2\2"+ - "\u00fe\u00ff\3\2\2\2\u00ff\u0102\3\2\2\2\u0100\u00fe\3\2\2\2\u0101\u00f7"+ - "\3\2\2\2\u0101\u0102\3\2\2\2\u0102\u0104\3\2\2\2\u0103\u0105\5\f\7\2\u0104"+ - "\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105\13\3\2\2\2\u0106\u0107\7;\2\2"+ - "\u0107\u010c\t\b\2\2\u0108\u0109\7h\2\2\u0109\u010a\t\b\2\2\u010a\u010c"+ - "\7m\2\2\u010b\u0106\3\2\2\2\u010b\u0108\3\2\2\2\u010c\r\3\2\2\2\u010d"+ - "\u0113\5\22\n\2\u010e\u010f\7\3\2\2\u010f\u0110\5\n\6\2\u0110\u0111\7"+ - "\4\2\2\u0111\u0113\3\2\2\2\u0112\u010d\3\2\2\2\u0112\u010e\3\2\2\2\u0113"+ - "\17\3\2\2\2\u0114\u0116\5,\27\2\u0115\u0117\t\t\2\2\u0116\u0115\3\2\2"+ - "\2\u0116\u0117\3\2\2\2\u0117\u011a\3\2\2\2\u0118\u0119\7E\2\2\u0119\u011b"+ - "\t\n\2\2\u011a\u0118\3\2\2\2\u011a\u011b\3\2\2\2\u011b\21\3\2\2\2\u011c"+ - "\u011e\7T\2\2\u011d\u011f\5\36\20\2\u011e\u011d\3\2\2\2\u011e\u011f\3"+ - "\2\2\2\u011f\u0120\3\2\2\2\u0120\u0125\5 \21\2\u0121\u0122\7\5\2\2\u0122"+ - "\u0124\5 \21\2\u0123\u0121\3\2\2\2\u0124\u0127\3\2\2\2\u0125\u0123\3\2"+ - "\2\2\u0125\u0126\3\2\2\2\u0126\u0129\3\2\2\2\u0127\u0125\3\2\2\2\u0128"+ - "\u012a\5\24\13\2\u0129\u0128\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012d\3"+ - "\2\2\2\u012b\u012c\7b\2\2\u012c\u012e\5.\30\2\u012d\u012b\3\2\2\2\u012d"+ - "\u012e\3\2\2\2\u012e\u0132\3\2\2\2\u012f\u0130\7.\2\2\u0130\u0131\7\17"+ - "\2\2\u0131\u0133\5\26\f\2\u0132\u012f\3\2\2\2\u0132\u0133\3\2\2\2\u0133"+ - "\u0136\3\2\2\2\u0134\u0135\7/\2\2\u0135\u0137\5.\30\2\u0136\u0134\3\2"+ - "\2\2\u0136\u0137\3\2\2\2\u0137\23\3\2\2\2\u0138\u0139\7)\2\2\u0139\u013e"+ - "\5\"\22\2\u013a\u013b\7\5\2\2\u013b\u013d\5\"\22\2\u013c\u013a\3\2\2\2"+ - "\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2\2\2\u013f\25"+ - "\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0143\5\36\20\2\u0142\u0141\3\2\2\2"+ - "\u0142\u0143\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0149\5\30\r\2\u0145\u0146"+ - "\7\5\2\2\u0146\u0148\5\30\r\2\u0147\u0145\3\2\2\2\u0148\u014b\3\2\2\2"+ - "\u0149\u0147\3\2\2\2\u0149\u014a\3\2\2\2\u014a\27\3\2\2\2\u014b\u0149"+ - "\3\2\2\2\u014c\u014d\5\32\16\2\u014d\31\3\2\2\2\u014e\u0157\7\3\2\2\u014f"+ - "\u0154\5,\27\2\u0150\u0151\7\5\2\2\u0151\u0153\5,\27\2\u0152\u0150\3\2"+ - "\2\2\u0153\u0156\3\2\2\2\u0154\u0152\3\2\2\2\u0154\u0155\3\2\2\2\u0155"+ - "\u0158\3\2\2\2\u0156\u0154\3\2\2\2\u0157\u014f\3\2\2\2\u0157\u0158\3\2"+ - "\2\2\u0158\u0159\3\2\2\2\u0159\u015c\7\4\2\2\u015a\u015c\5,\27\2\u015b"+ - "\u014e\3\2\2\2\u015b\u015a\3\2\2\2\u015c\33\3\2\2\2\u015d\u015e\5`\61"+ - "\2\u015e\u015f\7\f\2\2\u015f\u0160\7\3\2\2\u0160\u0161\5\n\6\2\u0161\u0162"+ - "\7\4\2\2\u0162\35\3\2\2\2\u0163\u0164\t\13\2\2\u0164\37\3\2\2\2\u0165"+ - "\u016a\5,\27\2\u0166\u0168\7\f\2\2\u0167\u0166\3\2\2\2\u0167\u0168\3\2"+ - "\2\2\u0168\u0169\3\2\2\2\u0169\u016b\5`\61\2\u016a\u0167\3\2\2\2\u016a"+ - "\u016b\3\2\2\2\u016b!\3\2\2\2\u016c\u0170\5*\26\2\u016d\u016f\5$\23\2"+ - "\u016e\u016d\3\2\2\2\u016f\u0172\3\2\2\2\u0170\u016e\3\2\2\2\u0170\u0171"+ - "\3\2\2\2\u0171#\3\2\2\2\u0172\u0170\3\2\2\2\u0173\u0174\5&\24\2\u0174"+ - "\u0175\7\67\2\2\u0175\u0177\5*\26\2\u0176\u0178\5(\25\2\u0177\u0176\3"+ - "\2\2\2\u0177\u0178\3\2\2\2\u0178\u017f\3\2\2\2\u0179\u017a\7B\2\2\u017a"+ - 
"\u017b\5&\24\2\u017b\u017c\7\67\2\2\u017c\u017d\5*\26\2\u017d\u017f\3"+ - "\2\2\2\u017e\u0173\3\2\2\2\u017e\u0179\3\2\2\2\u017f%\3\2\2\2\u0180\u0182"+ - "\7\64\2\2\u0181\u0180\3\2\2\2\u0181\u0182\3\2\2\2\u0182\u0190\3\2\2\2"+ - "\u0183\u0185\79\2\2\u0184\u0186\7J\2\2\u0185\u0184\3\2\2\2\u0185\u0186"+ - "\3\2\2\2\u0186\u0190\3\2\2\2\u0187\u0189\7N\2\2\u0188\u018a\7J\2\2\u0189"+ - "\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0190\3\2\2\2\u018b\u018d\7+"+ - "\2\2\u018c\u018e\7J\2\2\u018d\u018c\3\2\2\2\u018d\u018e\3\2\2\2\u018e"+ - "\u0190\3\2\2\2\u018f\u0181\3\2\2\2\u018f\u0183\3\2\2\2\u018f\u0187\3\2"+ - "\2\2\u018f\u018b\3\2\2\2\u0190\'\3\2\2\2\u0191\u0192\7F\2\2\u0192\u01a0"+ - "\5.\30\2\u0193\u0194\7_\2\2\u0194\u0195\7\3\2\2\u0195\u019a\5`\61\2\u0196"+ - "\u0197\7\5\2\2\u0197\u0199\5`\61\2\u0198\u0196\3\2\2\2\u0199\u019c\3\2"+ - "\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019d\3\2\2\2\u019c"+ - "\u019a\3\2\2\2\u019d\u019e\7\4\2\2\u019e\u01a0\3\2\2\2\u019f\u0191\3\2"+ - "\2\2\u019f\u0193\3\2\2\2\u01a0)\3\2\2\2\u01a1\u01a3\7*\2\2\u01a2\u01a1"+ - "\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a9\5b\62\2\u01a5"+ - "\u01a7\7\f\2\2\u01a6\u01a5\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01a8\3\2"+ - "\2\2\u01a8\u01aa\5^\60\2\u01a9\u01a6\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa"+ - "\u01be\3\2\2\2\u01ab\u01ac\7\3\2\2\u01ac\u01ad\5\n\6\2\u01ad\u01b2\7\4"+ - "\2\2\u01ae\u01b0\7\f\2\2\u01af\u01ae\3\2\2\2\u01af\u01b0\3\2\2\2\u01b0"+ - "\u01b1\3\2\2\2\u01b1\u01b3\5^\60\2\u01b2\u01af\3\2\2\2\u01b2\u01b3\3\2"+ - "\2\2\u01b3\u01be\3\2\2\2\u01b4\u01b5\7\3\2\2\u01b5\u01b6\5\"\22\2\u01b6"+ - "\u01bb\7\4\2\2\u01b7\u01b9\7\f\2\2\u01b8\u01b7\3\2\2\2\u01b8\u01b9\3\2"+ - "\2\2\u01b9\u01ba\3\2\2\2\u01ba\u01bc\5^\60\2\u01bb\u01b8\3\2\2\2\u01bb"+ - "\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd\u01a2\3\2\2\2\u01bd\u01ab\3\2"+ - "\2\2\u01bd\u01b4\3\2\2\2\u01be+\3\2\2\2\u01bf\u01c0\5.\30\2\u01c0-\3\2"+ - "\2\2\u01c1\u01c2\b\30\1\2\u01c2\u01c3\7C\2\2\u01c3\u01e1\5.\30\n\u01c4"+ - "\u01c5\7#\2\2\u01c5\u01c6\7\3\2\2\u01c6\u01c7\5\b\5\2\u01c7\u01c8\7\4"+ - "\2\2\u01c8\u01e1\3\2\2\2\u01c9\u01ca\7P\2\2\u01ca\u01cb\7\3\2\2\u01cb"+ - "\u01cc\5j\66\2\u01cc\u01cd\5\60\31\2\u01cd\u01ce\7\4\2\2\u01ce\u01e1\3"+ - "\2\2\2\u01cf\u01d0\7=\2\2\u01d0\u01d1\7\3\2\2\u01d1\u01d2\5^\60\2\u01d2"+ - "\u01d3\7\5\2\2\u01d3\u01d4\5j\66\2\u01d4\u01d5\5\60\31\2\u01d5\u01d6\7"+ - "\4\2\2\u01d6\u01e1\3\2\2\2\u01d7\u01d8\7=\2\2\u01d8\u01d9\7\3\2\2\u01d9"+ - "\u01da\5j\66\2\u01da\u01db\7\5\2\2\u01db\u01dc\5j\66\2\u01dc\u01dd\5\60"+ - "\31\2\u01dd\u01de\7\4\2\2\u01de\u01e1\3\2\2\2\u01df\u01e1\5\62\32\2\u01e0"+ - "\u01c1\3\2\2\2\u01e0\u01c4\3\2\2\2\u01e0\u01c9\3\2\2\2\u01e0\u01cf\3\2"+ - "\2\2\u01e0\u01d7\3\2\2\2\u01e0\u01df\3\2\2\2\u01e1\u01ea\3\2\2\2\u01e2"+ - "\u01e3\f\4\2\2\u01e3\u01e4\7\n\2\2\u01e4\u01e9\5.\30\5\u01e5\u01e6\f\3"+ - "\2\2\u01e6\u01e7\7H\2\2\u01e7\u01e9\5.\30\4\u01e8\u01e2\3\2\2\2\u01e8"+ - "\u01e5\3\2\2\2\u01e9\u01ec\3\2\2\2\u01ea\u01e8\3\2\2\2\u01ea\u01eb\3\2"+ - "\2\2\u01eb/\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ed\u01ee\7\5\2\2\u01ee\u01f0"+ - "\5j\66\2\u01ef\u01ed\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f1"+ - "\u01f2\3\2\2\2\u01f2\61\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u01f6\5<\37"+ - "\2\u01f5\u01f7\5\64\33\2\u01f6\u01f5\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7"+ - "\63\3\2\2\2\u01f8\u01fa\7C\2\2\u01f9\u01f8\3\2\2\2\u01f9\u01fa\3\2\2\2"+ - "\u01fa\u01fb\3\2\2\2\u01fb\u01fc\7\16\2\2\u01fc\u01fd\5<\37\2\u01fd\u01fe"+ - "\7\n\2\2\u01fe\u01ff\5<\37\2\u01ff\u0227\3\2\2\2\u0200\u0202\7C\2\2\u0201"+ - 
"\u0200\3\2\2\2\u0201\u0202\3\2\2\2\u0202\u0203\3\2\2\2\u0203\u0204\7\62"+ - "\2\2\u0204\u0205\7\3\2\2\u0205\u020a\5<\37\2\u0206\u0207\7\5\2\2\u0207"+ - "\u0209\5<\37\2\u0208\u0206\3\2\2\2\u0209\u020c\3\2\2\2\u020a\u0208\3\2"+ - "\2\2\u020a\u020b\3\2\2\2\u020b\u020d\3\2\2\2\u020c\u020a\3\2\2\2\u020d"+ - "\u020e\7\4\2\2\u020e\u0227\3\2\2\2\u020f\u0211\7C\2\2\u0210\u020f\3\2"+ - "\2\2\u0210\u0211\3\2\2\2\u0211\u0212\3\2\2\2\u0212\u0213\7\62\2\2\u0213"+ - "\u0214\7\3\2\2\u0214\u0215\5\b\5\2\u0215\u0216\7\4\2\2\u0216\u0227\3\2"+ - "\2\2\u0217\u0219\7C\2\2\u0218\u0217\3\2\2\2\u0218\u0219\3\2\2\2\u0219"+ - "\u021a\3\2\2\2\u021a\u021b\7:\2\2\u021b\u0227\58\35\2\u021c\u021e\7C\2"+ - "\2\u021d\u021c\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0220"+ - "\7O\2\2\u0220\u0227\5j\66\2\u0221\u0223\7\66\2\2\u0222\u0224\7C\2\2\u0223"+ - "\u0222\3\2\2\2\u0223\u0224\3\2\2\2\u0224\u0225\3\2\2\2\u0225\u0227\7D"+ - "\2\2\u0226\u01f9\3\2\2\2\u0226\u0201\3\2\2\2\u0226\u0210\3\2\2\2\u0226"+ - "\u0218\3\2\2\2\u0226\u021d\3\2\2\2\u0226\u0221\3\2\2\2\u0227\65\3\2\2"+ - "\2\u0228\u0229\7:\2\2\u0229\u022a\58\35\2\u022a\67\3\2\2\2\u022b\u022d"+ - "\5j\66\2\u022c\u022e\5:\36\2\u022d\u022c\3\2\2\2\u022d\u022e\3\2\2\2\u022e"+ - "9\3\2\2\2\u022f\u0230\7!\2\2\u0230\u0236\5j\66\2\u0231\u0232\7f\2\2\u0232"+ - "\u0233\5j\66\2\u0233\u0234\7m\2\2\u0234\u0236\3\2\2\2\u0235\u022f\3\2"+ - "\2\2\u0235\u0231\3\2\2\2\u0236;\3\2\2\2\u0237\u0238\b\37\1\2\u0238\u023c"+ - "\5> \2\u0239\u023a\t\7\2\2\u023a\u023c\5<\37\6\u023b\u0237\3\2\2\2\u023b"+ - "\u0239\3\2\2\2\u023c\u0249\3\2\2\2\u023d\u023e\f\5\2\2\u023e\u023f\t\f"+ - "\2\2\u023f\u0248\5<\37\6\u0240\u0241\f\4\2\2\u0241\u0242\t\7\2\2\u0242"+ - "\u0248\5<\37\5\u0243\u0244\f\3\2\2\u0244\u0245\5T+\2\u0245\u0246\5<\37"+ - "\4\u0246\u0248\3\2\2\2\u0247\u023d\3\2\2\2\u0247\u0240\3\2\2\2\u0247\u0243"+ - "\3\2\2\2\u0248\u024b\3\2\2\2\u0249\u0247\3\2\2\2\u0249\u024a\3\2\2\2\u024a"+ - "=\3\2\2\2\u024b\u0249\3\2\2\2\u024c\u024d\b \1\2\u024d\u0271\5B\"\2\u024e"+ - "\u0271\5H%\2\u024f\u0271\5@!\2\u0250\u0271\5R*\2\u0251\u0252\5^\60\2\u0252"+ - "\u0253\7|\2\2\u0253\u0255\3\2\2\2\u0254\u0251\3\2\2\2\u0254\u0255\3\2"+ - "\2\2\u0255\u0256\3\2\2\2\u0256\u0271\7w\2\2\u0257\u0271\5L\'\2\u0258\u0259"+ - "\7\3\2\2\u0259\u025a\5\b\5\2\u025a\u025b\7\4\2\2\u025b\u0271\3\2\2\2\u025c"+ - "\u0271\5^\60\2\u025d\u025e\7\3\2\2\u025e\u025f\5,\27\2\u025f\u0260\7\4"+ - "\2\2\u0260\u0271\3\2\2\2\u0261\u0263\7\20\2\2\u0262\u0264\5.\30\2\u0263"+ - "\u0262\3\2\2\2\u0263\u0264\3\2\2\2\u0264\u0266\3\2\2\2\u0265\u0267\5l"+ - "\67\2\u0266\u0265\3\2\2\2\u0267\u0268\3\2\2\2\u0268\u0266\3\2\2\2\u0268"+ - "\u0269\3\2\2\2\u0269\u026c\3\2\2\2\u026a\u026b\7\37\2\2\u026b\u026d\5"+ - ".\30\2\u026c\u026a\3\2\2\2\u026c\u026d\3\2\2\2\u026d\u026e\3\2\2\2\u026e"+ - "\u026f\7 \2\2\u026f\u0271\3\2\2\2\u0270\u024c\3\2\2\2\u0270\u024e\3\2"+ - "\2\2\u0270\u024f\3\2\2\2\u0270\u0250\3\2\2\2\u0270\u0254\3\2\2\2\u0270"+ - "\u0257\3\2\2\2\u0270\u0258\3\2\2\2\u0270\u025c\3\2\2\2\u0270\u025d\3\2"+ - "\2\2\u0270\u0261\3\2\2\2\u0271\u0277\3\2\2\2\u0272\u0273\f\f\2\2\u0273"+ - "\u0274\7z\2\2\u0274\u0276\5\\/\2\u0275\u0272\3\2\2\2\u0276\u0279\3\2\2"+ - "\2\u0277\u0275\3\2\2\2\u0277\u0278\3\2\2\2\u0278?\3\2\2\2\u0279\u0277"+ - "\3\2\2\2\u027a\u027e\7\30\2\2\u027b\u027e\7\26\2\2\u027c\u027e\7\27\2"+ - "\2\u027d\u027a\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027c\3\2\2\2\u027eA"+ - "\3\2\2\2\u027f\u028a\5D#\2\u0280\u0281\7g\2\2\u0281\u0282\5D#\2\u0282"+ - "\u0283\7m\2\2\u0283\u028a\3\2\2\2\u0284\u028a\5F$\2\u0285\u0286\7g\2\2"+ - 
"\u0286\u0287\5F$\2\u0287\u0288\7m\2\2\u0288\u028a\3\2\2\2\u0289\u027f"+ - "\3\2\2\2\u0289\u0280\3\2\2\2\u0289\u0284\3\2\2\2\u0289\u0285\3\2\2\2\u028a"+ - "C\3\2\2\2\u028b\u028c\7\21\2\2\u028c\u028d\7\3\2\2\u028d\u028e\5,\27\2"+ - "\u028e\u028f\7\f\2\2\u028f\u0290\5\\/\2\u0290\u0291\7\4\2\2\u0291E\3\2"+ - "\2\2\u0292\u0293\7\25\2\2\u0293\u0294\7\3\2\2\u0294\u0295\5,\27\2\u0295"+ - "\u0296\7\5\2\2\u0296\u0297\5\\/\2\u0297\u0298\7\4\2\2\u0298G\3\2\2\2\u0299"+ - "\u029f\5J&\2\u029a\u029b\7g\2\2\u029b\u029c\5J&\2\u029c\u029d\7m\2\2\u029d"+ - "\u029f\3\2\2\2\u029e\u0299\3\2\2\2\u029e\u029a\3\2\2\2\u029fI\3\2\2\2"+ - "\u02a0\u02a1\7%\2\2\u02a1\u02a2\7\3\2\2\u02a2\u02a3\5`\61\2\u02a3\u02a4"+ - "\7)\2\2\u02a4\u02a5\5<\37\2\u02a5\u02a6\7\4\2\2\u02a6K\3\2\2\2\u02a7\u02ad"+ - "\5N(\2\u02a8\u02a9\7g\2\2\u02a9\u02aa\5N(\2\u02aa\u02ab\7m\2\2\u02ab\u02ad"+ - "\3\2\2\2\u02ac\u02a7\3\2\2\2\u02ac\u02a8\3\2\2\2\u02adM\3\2\2\2\u02ae"+ - "\u02af\5P)\2\u02af\u02bb\7\3\2\2\u02b0\u02b2\5\36\20\2\u02b1\u02b0\3\2"+ - "\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b3\3\2\2\2\u02b3\u02b8\5,\27\2\u02b4"+ - "\u02b5\7\5\2\2\u02b5\u02b7\5,\27\2\u02b6\u02b4\3\2\2\2\u02b7\u02ba\3\2"+ - "\2\2\u02b8\u02b6\3\2\2\2\u02b8\u02b9\3\2\2\2\u02b9\u02bc\3\2\2\2\u02ba"+ - "\u02b8\3\2\2\2\u02bb\u02b1\3\2\2\2\u02bb\u02bc\3\2\2\2\u02bc\u02bd\3\2"+ - "\2\2\u02bd\u02be\7\4\2\2\u02beO\3\2\2\2\u02bf\u02c3\79\2\2\u02c0\u02c3"+ - "\7N\2\2\u02c1\u02c3\5`\61\2\u02c2\u02bf\3\2\2\2\u02c2\u02c0\3\2\2\2\u02c2"+ - "\u02c1\3\2\2\2\u02c3Q\3\2\2\2\u02c4\u02df\7D\2\2\u02c5\u02df\5X-\2\u02c6"+ - "\u02df\5h\65\2\u02c7\u02df\5V,\2\u02c8\u02ca\7~\2\2\u02c9\u02c8\3\2\2"+ - "\2\u02ca\u02cb\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02cc\3\2\2\2\u02cc\u02df"+ - "\3\2\2\2\u02cd\u02df\7}\2\2\u02ce\u02cf\7i\2\2\u02cf\u02d0\5j\66\2\u02d0"+ - "\u02d1\7m\2\2\u02d1\u02df\3\2\2\2\u02d2\u02d3\7j\2\2\u02d3\u02d4\5j\66"+ - "\2\u02d4\u02d5\7m\2\2\u02d5\u02df\3\2\2\2\u02d6\u02d7\7k\2\2\u02d7\u02d8"+ - "\5j\66\2\u02d8\u02d9\7m\2\2\u02d9\u02df\3\2\2\2\u02da\u02db\7l\2\2\u02db"+ - "\u02dc\5j\66\2\u02dc\u02dd\7m\2\2\u02dd\u02df\3\2\2\2\u02de\u02c4\3\2"+ - "\2\2\u02de\u02c5\3\2\2\2\u02de\u02c6\3\2\2\2\u02de\u02c7\3\2\2\2\u02de"+ - "\u02c9\3\2\2\2\u02de\u02cd\3\2\2\2\u02de\u02ce\3\2\2\2\u02de\u02d2\3\2"+ - "\2\2\u02de\u02d6\3\2\2\2\u02de\u02da\3\2\2\2\u02dfS\3\2\2\2\u02e0\u02e1"+ - "\t\r\2\2\u02e1U\3\2\2\2\u02e2\u02e3\t\16\2\2\u02e3W\3\2\2\2\u02e4\u02e6"+ - "\7\65\2\2\u02e5\u02e7\t\7\2\2\u02e6\u02e5\3\2\2\2\u02e6\u02e7\3\2\2\2"+ - "\u02e7\u02ea\3\2\2\2\u02e8\u02eb\5h\65\2\u02e9\u02eb\5j\66\2\u02ea\u02e8"+ - "\3\2\2\2\u02ea\u02e9\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ef\5Z.\2\u02ed"+ - "\u02ee\7\\\2\2\u02ee\u02f0\5Z.\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2"+ - "\2\u02f0Y\3\2\2\2\u02f1\u02f2\t\17\2\2\u02f2[\3\2\2\2\u02f3\u02f4\5`\61"+ - "\2\u02f4]\3\2\2\2\u02f5\u02f6\5`\61\2\u02f6\u02f7\7|\2\2\u02f7\u02f9\3"+ - "\2\2\2\u02f8\u02f5\3\2\2\2\u02f9\u02fc\3\2\2\2\u02fa\u02f8\3\2\2\2\u02fa"+ - "\u02fb\3\2\2\2\u02fb\u02fd\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fd\u02fe\5`"+ - "\61\2\u02fe_\3\2\2\2\u02ff\u0302\5d\63\2\u0300\u0302\5f\64\2\u0301\u02ff"+ - "\3\2\2\2\u0301\u0300\3\2\2\2\u0302a\3\2\2\2\u0303\u0304\5`\61\2\u0304"+ - "\u0305\7\6\2\2\u0305\u0307\3\2\2\2\u0306\u0303\3\2\2\2\u0306\u0307\3\2"+ - "\2\2\u0307\u0308\3\2\2\2\u0308\u0310\7\u0083\2\2\u0309\u030a\5`\61\2\u030a"+ - "\u030b\7\6\2\2\u030b\u030d\3\2\2\2\u030c\u0309\3\2\2\2\u030c\u030d\3\2"+ - "\2\2\u030d\u030e\3\2\2\2\u030e\u0310\5`\61\2\u030f\u0306\3\2\2\2\u030f"+ - "\u030c\3\2\2\2\u0310c\3\2\2\2\u0311\u0314\7\u0084\2\2\u0312\u0314\7\u0085"+ - 
"\2\2\u0313\u0311\3\2\2\2\u0313\u0312\3\2\2\2\u0314e\3\2\2\2\u0315\u0319"+ - "\7\u0081\2\2\u0316\u0319\5n8\2\u0317\u0319\7\u0082\2\2\u0318\u0315\3\2"+ - "\2\2\u0318\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319g\3\2\2\2\u031a\u031d"+ - "\7\u0080\2\2\u031b\u031d\7\177\2\2\u031c\u031a\3\2\2\2\u031c\u031b\3\2"+ - "\2\2\u031di\3\2\2\2\u031e\u031f\t\20\2\2\u031fk\3\2\2\2\u0320\u0321\7"+ - "a\2\2\u0321\u0322\5,\27\2\u0322\u0323\7Z\2\2\u0323\u0324\5,\27\2\u0324"+ - "m\3\2\2\2\u0325\u0326\t\21\2\2\u0326o\3\2\2\2o\177\u0081\u0085\u008e\u0090"+ - "\u0094\u009b\u009f\u00a5\u00aa\u00af\u00b3\u00b8\u00c0\u00c4\u00cc\u00cf"+ - "\u00d5\u00da\u00dd\u00e2\u00e5\u00e7\u00ef\u00f2\u00fe\u0101\u0104\u010b"+ - "\u0112\u0116\u011a\u011e\u0125\u0129\u012d\u0132\u0136\u013e\u0142\u0149"+ - "\u0154\u0157\u015b\u0167\u016a\u0170\u0177\u017e\u0181\u0185\u0189\u018d"+ - "\u018f\u019a\u019f\u01a2\u01a6\u01a9\u01af\u01b2\u01b8\u01bb\u01bd\u01e0"+ - "\u01e8\u01ea\u01f1\u01f6\u01f9\u0201\u020a\u0210\u0218\u021d\u0223\u0226"+ - "\u022d\u0235\u023b\u0247\u0249\u0254\u0263\u0268\u026c\u0270\u0277\u027d"+ - "\u0289\u029e\u02ac\u02b1\u02b8\u02bb\u02c2\u02cb\u02de\u02e6\u02ea\u02ef"+ - "\u02fa\u0301\u0306\u030c\u030f\u0313\u0318\u031c"; + "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\3\2\3"+ + "\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0088\n\4\f"+ + "\4\16\4\u008b\13\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0097"+ + "\n\4\f\4\16\4\u009a\13\4\3\4\5\4\u009d\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00a4"+ + "\n\4\3\4\3\4\5\4\u00a8\n\4\3\4\3\4\3\4\3\4\5\4\u00ae\n\4\3\4\3\4\3\4\5"+ + "\4\u00b3\n\4\3\4\3\4\3\4\5\4\u00b8\n\4\3\4\3\4\5\4\u00bc\n\4\3\4\3\4\3"+ + "\4\5\4\u00c1\n\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c9\n\4\3\4\3\4\5\4\u00cd"+ + "\n\4\3\4\3\4\3\4\3\4\7\4\u00d3\n\4\f\4\16\4\u00d6\13\4\5\4\u00d8\n\4\3"+ + "\4\3\4\3\4\3\4\5\4\u00de\n\4\3\4\3\4\3\4\5\4\u00e3\n\4\3\4\5\4\u00e6\n"+ + "\4\3\4\3\4\3\4\5\4\u00eb\n\4\3\4\5\4\u00ee\n\4\5\4\u00f0\n\4\3\5\3\5\3"+ + "\5\3\5\7\5\u00f6\n\5\f\5\16\5\u00f9\13\5\5\5\u00fb\n\5\3\5\3\5\3\6\3\6"+ + "\3\6\3\6\3\6\3\6\7\6\u0105\n\6\f\6\16\6\u0108\13\6\5\6\u010a\n\6\3\6\5"+ + "\6\u010d\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u0114\n\7\3\b\3\b\3\b\3\b\3\b\5\b"+ + "\u011b\n\b\3\t\3\t\5\t\u011f\n\t\3\t\3\t\5\t\u0123\n\t\3\n\3\n\5\n\u0127"+ + "\n\n\3\n\3\n\5\n\u012b\n\n\3\n\3\n\5\n\u012f\n\n\3\n\3\n\3\n\5\n\u0134"+ + "\n\n\3\n\3\n\5\n\u0138\n\n\3\13\3\13\3\13\3\13\7\13\u013e\n\13\f\13\16"+ + "\13\u0141\13\13\3\13\5\13\u0144\n\13\3\f\5\f\u0147\n\f\3\f\3\f\3\f\7\f"+ + "\u014c\n\f\f\f\16\f\u014f\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16\u0157"+ + "\n\16\f\16\16\16\u015a\13\16\5\16\u015c\n\16\3\16\3\16\5\16\u0160\n\16"+ + "\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\3\21\7\21\u016d\n\21"+ + "\f\21\16\21\u0170\13\21\3\22\3\22\5\22\u0174\n\22\3\22\5\22\u0177\n\22"+ + "\3\23\3\23\7\23\u017b\n\23\f\23\16\23\u017e\13\23\3\24\3\24\3\24\3\24"+ + "\5\24\u0184\n\24\3\24\3\24\3\24\3\24\3\24\5\24\u018b\n\24\3\25\5\25\u018e"+ + "\n\25\3\25\3\25\5\25\u0192\n\25\3\25\3\25\5\25\u0196\n\25\3\25\3\25\5"+ + "\25\u019a\n\25\5\25\u019c\n\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\7\26"+ + "\u01a5\n\26\f\26\16\26\u01a8\13\26\3\26\3\26\5\26\u01ac\n\26\3\27\5\27"+ + "\u01af\n\27\3\27\3\27\5\27\u01b3\n\27\3\27\5\27\u01b6\n\27\3\27\3\27\3"+ + "\27\3\27\5\27\u01bc\n\27\3\27\5\27\u01bf\n\27\3\27\3\27\3\27\3\27\5\27"+ + "\u01c5\n\27\3\27\5\27\u01c8\n\27\5\27\u01ca\n\27\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\7\31\u01da\n\31\f\31"+ + 
"\16\31\u01dd\13\31\3\32\3\32\5\32\u01e1\n\32\3\32\5\32\u01e4\n\32\3\33"+ + "\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ + "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ + "\3\34\3\34\3\34\3\34\5\34\u0207\n\34\3\34\3\34\3\34\3\34\3\34\3\34\7\34"+ + "\u020f\n\34\f\34\16\34\u0212\13\34\3\35\3\35\7\35\u0216\n\35\f\35\16\35"+ + "\u0219\13\35\3\36\3\36\5\36\u021d\n\36\3\37\5\37\u0220\n\37\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\5\37\u0228\n\37\3\37\3\37\3\37\3\37\3\37\7\37\u022f"+ + "\n\37\f\37\16\37\u0232\13\37\3\37\3\37\3\37\5\37\u0237\n\37\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\5\37\u023f\n\37\3\37\3\37\3\37\5\37\u0244\n\37\3"+ + "\37\3\37\3\37\3\37\5\37\u024a\n\37\3\37\5\37\u024d\n\37\3 \3 \3 \3!\3"+ + "!\5!\u0254\n!\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u025c\n\"\3#\3#\3#\3#\5#\u0262"+ + "\n#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\7#\u026e\n#\f#\16#\u0271\13#\3$\3$\3"+ + "$\3$\3$\3$\3$\3$\5$\u027b\n$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\5"+ + "$\u028a\n$\3$\6$\u028d\n$\r$\16$\u028e\3$\3$\5$\u0293\n$\3$\3$\5$\u0297"+ + "\n$\3$\3$\3$\7$\u029c\n$\f$\16$\u029f\13$\3%\3%\3%\5%\u02a4\n%\3&\3&\3"+ + "&\3&\3&\3&\3&\3&\3&\3&\5&\u02b0\n&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3"+ + "(\3(\3(\3(\3(\3)\3)\3)\3)\3)\5)\u02c5\n)\3*\3*\3*\3*\3*\3*\3*\3+\3+\3"+ + "+\3+\3+\5+\u02d3\n+\3,\3,\3,\5,\u02d8\n,\3,\3,\3,\7,\u02dd\n,\f,\16,\u02e0"+ + "\13,\5,\u02e2\n,\3,\3,\3-\3-\3-\5-\u02e9\n-\3.\3.\3.\3.\3.\6.\u02f0\n"+ + ".\r.\16.\u02f1\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\5.\u0305"+ + "\n.\3/\3/\3\60\3\60\3\61\3\61\5\61\u030d\n\61\3\61\3\61\5\61\u0311\n\61"+ + "\3\61\3\61\3\61\5\61\u0316\n\61\3\62\3\62\3\63\3\63\3\64\3\64\3\64\7\64"+ + "\u031f\n\64\f\64\16\64\u0322\13\64\3\64\3\64\3\65\3\65\5\65\u0328\n\65"+ + "\3\66\3\66\3\66\5\66\u032d\n\66\3\66\3\66\3\66\3\66\5\66\u0333\n\66\3"+ + "\66\5\66\u0336\n\66\3\67\3\67\5\67\u033a\n\67\38\38\38\58\u033f\n8\39"+ + "\39\59\u0343\n9\3:\3:\3;\3;\3;\3;\3;\3<\3<\3<\2\5\66DF=\2\4\6\b\n\f\16"+ + "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bd"+ + "fhjlnprtv\2\22\b\2\7\7\t\t\"\"==HHLL\4\2..[[\4\2\t\tHH\4\2**\63\63\3\2"+ + "\34\35\3\2wx\4\2\7\7\u0081\u0081\4\2\r\r\34\34\4\2\'\'99\4\2\7\7\36\36"+ + "\3\2y{\3\2pv\4\2&&]]\7\2\31\32\61\62?BTUfg\3\2\177\u0080\31\2\b\t\23\24"+ + "\26\31\33\33\"\"$$\'\')),.\61\61\66\6699<=??AAHHLOQTWXZ[_`bbff\u03b1\2"+ + "x\3\2\2\2\4{\3\2\2\2\6\u00ef\3\2\2\2\b\u00fa\3\2\2\2\n\u00fe\3\2\2\2\f"+ + "\u0113\3\2\2\2\16\u011a\3\2\2\2\20\u011c\3\2\2\2\22\u0124\3\2\2\2\24\u0139"+ + "\3\2\2\2\26\u0146\3\2\2\2\30\u0150\3\2\2\2\32\u015f\3\2\2\2\34\u0161\3"+ + "\2\2\2\36\u0167\3\2\2\2 \u0169\3\2\2\2\"\u0171\3\2\2\2$\u0178\3\2\2\2"+ + "&\u018a\3\2\2\2(\u019b\3\2\2\2*\u01ab\3\2\2\2,\u01c9\3\2\2\2.\u01cb\3"+ + "\2\2\2\60\u01d6\3\2\2\2\62\u01de\3\2\2\2\64\u01e5\3\2\2\2\66\u0206\3\2"+ + "\2\28\u0217\3\2\2\2:\u021a\3\2\2\2<\u024c\3\2\2\2>\u024e\3\2\2\2@\u0251"+ + "\3\2\2\2B\u025b\3\2\2\2D\u0261\3\2\2\2F\u0296\3\2\2\2H\u02a3\3\2\2\2J"+ + "\u02af\3\2\2\2L\u02b1\3\2\2\2N\u02b8\3\2\2\2P\u02c4\3\2\2\2R\u02c6\3\2"+ + "\2\2T\u02d2\3\2\2\2V\u02d4\3\2\2\2X\u02e8\3\2\2\2Z\u0304\3\2\2\2\\\u0306"+ + "\3\2\2\2^\u0308\3\2\2\2`\u030a\3\2\2\2b\u0317\3\2\2\2d\u0319\3\2\2\2f"+ + "\u0320\3\2\2\2h\u0327\3\2\2\2j\u0335\3\2\2\2l\u0339\3\2\2\2n\u033e\3\2"+ + "\2\2p\u0342\3\2\2\2r\u0344\3\2\2\2t\u0346\3\2\2\2v\u034b\3\2\2\2xy\5\6"+ + "\4\2yz\7\2\2\3z\3\3\2\2\2{|\5\64\33\2|}\7\2\2\3}\5\3\2\2\2~\u00f0\5\b"+ + "\5\2\177\u008d\7$\2\2\u0080\u0089\7\3\2\2\u0081\u0082\7O\2\2\u0082\u0088"+ + 
"\t\2\2\2\u0083\u0084\7)\2\2\u0084\u0088\t\3\2\2\u0085\u0086\7b\2\2\u0086"+ + "\u0088\5^\60\2\u0087\u0081\3\2\2\2\u0087\u0083\3\2\2\2\u0087\u0085\3\2"+ + "\2\2\u0088\u008b\3\2\2\2\u0089\u0087\3\2\2\2\u0089\u008a\3\2\2\2\u008a"+ + "\u008c\3\2\2\2\u008b\u0089\3\2\2\2\u008c\u008e\7\4\2\2\u008d\u0080\3\2"+ + "\2\2\u008d\u008e\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u00f0\5\6\4\2\u0090"+ + "\u009c\7\33\2\2\u0091\u0098\7\3\2\2\u0092\u0093\7O\2\2\u0093\u0097\t\4"+ + "\2\2\u0094\u0095\7)\2\2\u0095\u0097\t\3\2\2\u0096\u0092\3\2\2\2\u0096"+ + "\u0094\3\2\2\2\u0097\u009a\3\2\2\2\u0098\u0096\3\2\2\2\u0098\u0099\3\2"+ + "\2\2\u0099\u009b\3\2\2\2\u009a\u0098\3\2\2\2\u009b\u009d\7\4\2\2\u009c"+ + "\u0091\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u00f0\5\6"+ + "\4\2\u009f\u00a0\7W\2\2\u00a0\u00a3\7Z\2\2\u00a1\u00a2\7\64\2\2\u00a2"+ + "\u00a4\7+\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a4\u00a7\3\2"+ + "\2\2\u00a5\u00a8\5> \2\u00a6\u00a8\5j\66\2\u00a7\u00a5\3\2\2\2\u00a7\u00a6"+ + "\3\2\2\2\u00a7\u00a8\3\2\2\2\u00a8\u00f0\3\2\2\2\u00a9\u00aa\7W\2\2\u00aa"+ + "\u00ad\7\24\2\2\u00ab\u00ac\7\64\2\2\u00ac\u00ae\7+\2\2\u00ad\u00ab\3"+ + "\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b2\t\5\2\2\u00b0"+ + "\u00b3\5> \2\u00b1\u00b3\5j\66\2\u00b2\u00b0\3\2\2\2\u00b2\u00b1\3\2\2"+ + "\2\u00b3\u00f0\3\2\2\2\u00b4\u00b7\t\6\2\2\u00b5\u00b6\7\64\2\2\u00b6"+ + "\u00b8\7+\2\2\u00b7\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00bb\3\2"+ + "\2\2\u00b9\u00bc\5> \2\u00ba\u00bc\5j\66\2\u00bb\u00b9\3\2\2\2\u00bb\u00ba"+ + "\3\2\2\2\u00bc\u00f0\3\2\2\2\u00bd\u00be\7W\2\2\u00be\u00c0\7-\2\2\u00bf"+ + "\u00c1\5> \2\u00c0\u00bf\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00f0\3\2\2"+ + "\2\u00c2\u00c3\7W\2\2\u00c3\u00f0\7S\2\2\u00c4\u00c5\7X\2\2\u00c5\u00c8"+ + "\7Z\2\2\u00c6\u00c7\7\22\2\2\u00c7\u00c9\5> \2\u00c8\u00c6\3\2\2\2\u00c8"+ + "\u00c9\3\2\2\2\u00c9\u00cc\3\2\2\2\u00ca\u00cd\5> \2\u00cb\u00cd\5j\66"+ + "\2\u00cc\u00ca\3\2\2\2\u00cc\u00cb\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d7"+ + "\3\2\2\2\u00ce\u00cf\7_\2\2\u00cf\u00d4\5r:\2\u00d0\u00d1\7\5\2\2\u00d1"+ + "\u00d3\5r:\2\u00d2\u00d0\3\2\2\2\u00d3\u00d6\3\2\2\2\u00d4\u00d2\3\2\2"+ + "\2\u00d4\u00d5\3\2\2\2\u00d5\u00d8\3\2\2\2\u00d6\u00d4\3\2\2\2\u00d7\u00ce"+ + "\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00f0\3\2\2\2\u00d9\u00da\7X\2\2\u00da"+ + "\u00dd\7\24\2\2\u00db\u00dc\7\22\2\2\u00dc\u00de\5r:\2\u00dd\u00db\3\2"+ + "\2\2\u00dd\u00de\3\2\2\2\u00de\u00e2\3\2\2\2\u00df\u00e0\7Y\2\2\u00e0"+ + "\u00e3\5> \2\u00e1\u00e3\5j\66\2\u00e2\u00df\3\2\2\2\u00e2\u00e1\3\2\2"+ + "\2\u00e2\u00e3\3\2\2\2\u00e3\u00e5\3\2\2\2\u00e4\u00e6\5> \2\u00e5\u00e4"+ + "\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00f0\3\2\2\2\u00e7\u00e8\7X\2\2\u00e8"+ + "\u00ed\7`\2\2\u00e9\u00eb\t\7\2\2\u00ea\u00e9\3\2\2\2\u00ea\u00eb\3\2"+ + "\2\2\u00eb\u00ec\3\2\2\2\u00ec\u00ee\5p9\2\u00ed\u00ea\3\2\2\2\u00ed\u00ee"+ + "\3\2\2\2\u00ee\u00f0\3\2\2\2\u00ef~\3\2\2\2\u00ef\177\3\2\2\2\u00ef\u0090"+ + "\3\2\2\2\u00ef\u009f\3\2\2\2\u00ef\u00a9\3\2\2\2\u00ef\u00b4\3\2\2\2\u00ef"+ + "\u00bd\3\2\2\2\u00ef\u00c2\3\2\2\2\u00ef\u00c4\3\2\2\2\u00ef\u00d9\3\2"+ + "\2\2\u00ef\u00e7\3\2\2\2\u00f0\7\3\2\2\2\u00f1\u00f2\7e\2\2\u00f2\u00f7"+ + "\5\34\17\2\u00f3\u00f4\7\5\2\2\u00f4\u00f6\5\34\17\2\u00f5\u00f3\3\2\2"+ + "\2\u00f6\u00f9\3\2\2\2\u00f7\u00f5\3\2\2\2\u00f7\u00f8\3\2\2\2\u00f8\u00fb"+ + "\3\2\2\2\u00f9\u00f7\3\2\2\2\u00fa\u00f1\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb"+ + "\u00fc\3\2\2\2\u00fc\u00fd\5\n\6\2\u00fd\t\3\2\2\2\u00fe\u0109\5\16\b"+ + "\2\u00ff\u0100\7J\2\2\u0100\u0101\7\17\2\2\u0101\u0106\5\20\t\2\u0102"+ + 
"\u0103\7\5\2\2\u0103\u0105\5\20\t\2\u0104\u0102\3\2\2\2\u0105\u0108\3"+ + "\2\2\2\u0106\u0104\3\2\2\2\u0106\u0107\3\2\2\2\u0107\u010a\3\2\2\2\u0108"+ + "\u0106\3\2\2\2\u0109\u00ff\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u010c\3\2"+ + "\2\2\u010b\u010d\5\f\7\2\u010c\u010b\3\2\2\2\u010c\u010d\3\2\2\2\u010d"+ + "\13\3\2\2\2\u010e\u010f\7<\2\2\u010f\u0114\t\b\2\2\u0110\u0111\7j\2\2"+ + "\u0111\u0112\t\b\2\2\u0112\u0114\7o\2\2\u0113\u010e\3\2\2\2\u0113\u0110"+ + "\3\2\2\2\u0114\r\3\2\2\2\u0115\u011b\5\22\n\2\u0116\u0117\7\3\2\2\u0117"+ + "\u0118\5\n\6\2\u0118\u0119\7\4\2\2\u0119\u011b\3\2\2\2\u011a\u0115\3\2"+ + "\2\2\u011a\u0116\3\2\2\2\u011b\17\3\2\2\2\u011c\u011e\5\64\33\2\u011d"+ + "\u011f\t\t\2\2\u011e\u011d\3\2\2\2\u011e\u011f\3\2\2\2\u011f\u0122\3\2"+ + "\2\2\u0120\u0121\7F\2\2\u0121\u0123\t\n\2\2\u0122\u0120\3\2\2\2\u0122"+ + "\u0123\3\2\2\2\u0123\21\3\2\2\2\u0124\u0126\7V\2\2\u0125\u0127\5\36\20"+ + "\2\u0126\u0125\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u012a"+ + "\5 \21\2\u0129\u012b\5\24\13\2\u012a\u0129\3\2\2\2\u012a\u012b\3\2\2\2"+ + "\u012b\u012e\3\2\2\2\u012c\u012d\7d\2\2\u012d\u012f\5\66\34\2\u012e\u012c"+ + "\3\2\2\2\u012e\u012f\3\2\2\2\u012f\u0133\3\2\2\2\u0130\u0131\7/\2\2\u0131"+ + "\u0132\7\17\2\2\u0132\u0134\5\26\f\2\u0133\u0130\3\2\2\2\u0133\u0134\3"+ + "\2\2\2\u0134\u0137\3\2\2\2\u0135\u0136\7\60\2\2\u0136\u0138\5\66\34\2"+ + "\u0137\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138\23\3\2\2\2\u0139\u013a"+ + "\7*\2\2\u013a\u013f\5$\23\2\u013b\u013c\7\5\2\2\u013c\u013e\5$\23\2\u013d"+ + "\u013b\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2\2\2\u013f\u0140\3\2"+ + "\2\2\u0140\u0143\3\2\2\2\u0141\u013f\3\2\2\2\u0142\u0144\5.\30\2\u0143"+ + "\u0142\3\2\2\2\u0143\u0144\3\2\2\2\u0144\25\3\2\2\2\u0145\u0147\5\36\20"+ + "\2\u0146\u0145\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0148\3\2\2\2\u0148\u014d"+ + "\5\30\r\2\u0149\u014a\7\5\2\2\u014a\u014c\5\30\r\2\u014b\u0149\3\2\2\2"+ + "\u014c\u014f\3\2\2\2\u014d\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e\27"+ + "\3\2\2\2\u014f\u014d\3\2\2\2\u0150\u0151\5\32\16\2\u0151\31\3\2\2\2\u0152"+ + "\u015b\7\3\2\2\u0153\u0158\5\64\33\2\u0154\u0155\7\5\2\2\u0155\u0157\5"+ + "\64\33\2\u0156\u0154\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158"+ + "\u0159\3\2\2\2\u0159\u015c\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u0153\3\2"+ + "\2\2\u015b\u015c\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0160\7\4\2\2\u015e"+ + "\u0160\5\64\33\2\u015f\u0152\3\2\2\2\u015f\u015e\3\2\2\2\u0160\33\3\2"+ + "\2\2\u0161\u0162\5h\65\2\u0162\u0163\7\f\2\2\u0163\u0164\7\3\2\2\u0164"+ + "\u0165\5\n\6\2\u0165\u0166\7\4\2\2\u0166\35\3\2\2\2\u0167\u0168\t\13\2"+ + "\2\u0168\37\3\2\2\2\u0169\u016e\5\"\22\2\u016a\u016b\7\5\2\2\u016b\u016d"+ + "\5\"\22\2\u016c\u016a\3\2\2\2\u016d\u0170\3\2\2\2\u016e\u016c\3\2\2\2"+ + "\u016e\u016f\3\2\2\2\u016f!\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0176\5"+ + "\64\33\2\u0172\u0174\7\f\2\2\u0173\u0172\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+ + "\u0175\3\2\2\2\u0175\u0177\5h\65\2\u0176\u0173\3\2\2\2\u0176\u0177\3\2"+ + "\2\2\u0177#\3\2\2\2\u0178\u017c\5,\27\2\u0179\u017b\5&\24\2\u017a\u0179"+ + "\3\2\2\2\u017b\u017e\3\2\2\2\u017c\u017a\3\2\2\2\u017c\u017d\3\2\2\2\u017d"+ + "%\3\2\2\2\u017e\u017c\3\2\2\2\u017f\u0180\5(\25\2\u0180\u0181\78\2\2\u0181"+ + "\u0183\5,\27\2\u0182\u0184\5*\26\2\u0183\u0182\3\2\2\2\u0183\u0184\3\2"+ + "\2\2\u0184\u018b\3\2\2\2\u0185\u0186\7C\2\2\u0186\u0187\5(\25\2\u0187"+ + "\u0188\78\2\2\u0188\u0189\5,\27\2\u0189\u018b\3\2\2\2\u018a\u017f\3\2"+ + "\2\2\u018a\u0185\3\2\2\2\u018b\'\3\2\2\2\u018c\u018e\7\65\2\2\u018d\u018c"+ + 
"\3\2\2\2\u018d\u018e\3\2\2\2\u018e\u019c\3\2\2\2\u018f\u0191\7:\2\2\u0190"+ + "\u0192\7K\2\2\u0191\u0190\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u019c\3\2"+ + "\2\2\u0193\u0195\7P\2\2\u0194\u0196\7K\2\2\u0195\u0194\3\2\2\2\u0195\u0196"+ + "\3\2\2\2\u0196\u019c\3\2\2\2\u0197\u0199\7,\2\2\u0198\u019a\7K\2\2\u0199"+ + "\u0198\3\2\2\2\u0199\u019a\3\2\2\2\u019a\u019c\3\2\2\2\u019b\u018d\3\2"+ + "\2\2\u019b\u018f\3\2\2\2\u019b\u0193\3\2\2\2\u019b\u0197\3\2\2\2\u019c"+ + ")\3\2\2\2\u019d\u019e\7G\2\2\u019e\u01ac\5\66\34\2\u019f\u01a0\7a\2\2"+ + "\u01a0\u01a1\7\3\2\2\u01a1\u01a6\5h\65\2\u01a2\u01a3\7\5\2\2\u01a3\u01a5"+ + "\5h\65\2\u01a4\u01a2\3\2\2\2\u01a5\u01a8\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6"+ + "\u01a7\3\2\2\2\u01a7\u01a9\3\2\2\2\u01a8\u01a6\3\2\2\2\u01a9\u01aa\7\4"+ + "\2\2\u01aa\u01ac\3\2\2\2\u01ab\u019d\3\2\2\2\u01ab\u019f\3\2\2\2\u01ac"+ + "+\3\2\2\2\u01ad\u01af\7+\2\2\u01ae\u01ad\3\2\2\2\u01ae\u01af\3\2\2\2\u01af"+ + "\u01b0\3\2\2\2\u01b0\u01b5\5j\66\2\u01b1\u01b3\7\f\2\2\u01b2\u01b1\3\2"+ + "\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b4\3\2\2\2\u01b4\u01b6\5f\64\2\u01b5"+ + "\u01b2\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01ca\3\2\2\2\u01b7\u01b8\7\3"+ + "\2\2\u01b8\u01b9\5\n\6\2\u01b9\u01be\7\4\2\2\u01ba\u01bc\7\f\2\2\u01bb"+ + "\u01ba\3\2\2\2\u01bb\u01bc\3\2\2\2\u01bc\u01bd\3\2\2\2\u01bd\u01bf\5f"+ + "\64\2\u01be\u01bb\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf\u01ca\3\2\2\2\u01c0"+ + "\u01c1\7\3\2\2\u01c1\u01c2\5$\23\2\u01c2\u01c7\7\4\2\2\u01c3\u01c5\7\f"+ + "\2\2\u01c4\u01c3\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6"+ + "\u01c8\5f\64\2\u01c7\u01c4\3\2\2\2\u01c7\u01c8\3\2\2\2\u01c8\u01ca\3\2"+ + "\2\2\u01c9\u01ae\3\2\2\2\u01c9\u01b7\3\2\2\2\u01c9\u01c0\3\2\2\2\u01ca"+ + "-\3\2\2\2\u01cb\u01cc\7N\2\2\u01cc\u01cd\7\3\2\2\u01cd\u01ce\5\60\31\2"+ + "\u01ce\u01cf\7(\2\2\u01cf\u01d0\5f\64\2\u01d0\u01d1\7\63\2\2\u01d1\u01d2"+ + "\7\3\2\2\u01d2\u01d3\5\60\31\2\u01d3\u01d4\7\4\2\2\u01d4\u01d5\7\4\2\2"+ + "\u01d5/\3\2\2\2\u01d6\u01db\5\62\32\2\u01d7\u01d8\7\5\2\2\u01d8\u01da"+ + "\5\62\32\2\u01d9\u01d7\3\2\2\2\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2"+ + "\u01db\u01dc\3\2\2\2\u01dc\61\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01e3"+ + "\5D#\2\u01df\u01e1\7\f\2\2\u01e0\u01df\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1"+ + "\u01e2\3\2\2\2\u01e2\u01e4\5h\65\2\u01e3\u01e0\3\2\2\2\u01e3\u01e4\3\2"+ + "\2\2\u01e4\63\3\2\2\2\u01e5\u01e6\5\66\34\2\u01e6\65\3\2\2\2\u01e7\u01e8"+ + "\b\34\1\2\u01e8\u01e9\7D\2\2\u01e9\u0207\5\66\34\n\u01ea\u01eb\7#\2\2"+ + "\u01eb\u01ec\7\3\2\2\u01ec\u01ed\5\b\5\2\u01ed\u01ee\7\4\2\2\u01ee\u0207"+ + "\3\2\2\2\u01ef\u01f0\7R\2\2\u01f0\u01f1\7\3\2\2\u01f1\u01f2\5r:\2\u01f2"+ + "\u01f3\58\35\2\u01f3\u01f4\7\4\2\2\u01f4\u0207\3\2\2\2\u01f5\u01f6\7>"+ + "\2\2\u01f6\u01f7\7\3\2\2\u01f7\u01f8\5f\64\2\u01f8\u01f9\7\5\2\2\u01f9"+ + "\u01fa\5r:\2\u01fa\u01fb\58\35\2\u01fb\u01fc\7\4\2\2\u01fc\u0207\3\2\2"+ + "\2\u01fd\u01fe\7>\2\2\u01fe\u01ff\7\3\2\2\u01ff\u0200\5r:\2\u0200\u0201"+ + "\7\5\2\2\u0201\u0202\5r:\2\u0202\u0203\58\35\2\u0203\u0204\7\4\2\2\u0204"+ + "\u0207\3\2\2\2\u0205\u0207\5:\36\2\u0206\u01e7\3\2\2\2\u0206\u01ea\3\2"+ + "\2\2\u0206\u01ef\3\2\2\2\u0206\u01f5\3\2\2\2\u0206\u01fd\3\2\2\2\u0206"+ + "\u0205\3\2\2\2\u0207\u0210\3\2\2\2\u0208\u0209\f\4\2\2\u0209\u020a\7\n"+ + "\2\2\u020a\u020f\5\66\34\5\u020b\u020c\f\3\2\2\u020c\u020d\7I\2\2\u020d"+ + "\u020f\5\66\34\4\u020e\u0208\3\2\2\2\u020e\u020b\3\2\2\2\u020f\u0212\3"+ + "\2\2\2\u0210\u020e\3\2\2\2\u0210\u0211\3\2\2\2\u0211\67\3\2\2\2\u0212"+ + "\u0210\3\2\2\2\u0213\u0214\7\5\2\2\u0214\u0216\5r:\2\u0215\u0213\3\2\2"+ + 
"\2\u0216\u0219\3\2\2\2\u0217\u0215\3\2\2\2\u0217\u0218\3\2\2\2\u02189"+ + "\3\2\2\2\u0219\u0217\3\2\2\2\u021a\u021c\5D#\2\u021b\u021d\5<\37\2\u021c"+ + "\u021b\3\2\2\2\u021c\u021d\3\2\2\2\u021d;\3\2\2\2\u021e\u0220\7D\2\2\u021f"+ + "\u021e\3\2\2\2\u021f\u0220\3\2\2\2\u0220\u0221\3\2\2\2\u0221\u0222\7\16"+ + "\2\2\u0222\u0223\5D#\2\u0223\u0224\7\n\2\2\u0224\u0225\5D#\2\u0225\u024d"+ + "\3\2\2\2\u0226\u0228\7D\2\2\u0227\u0226\3\2\2\2\u0227\u0228\3\2\2\2\u0228"+ + "\u0229\3\2\2\2\u0229\u022a\7\63\2\2\u022a\u022b\7\3\2\2\u022b\u0230\5"+ + "D#\2\u022c\u022d\7\5\2\2\u022d\u022f\5D#\2\u022e\u022c\3\2\2\2\u022f\u0232"+ + "\3\2\2\2\u0230\u022e\3\2\2\2\u0230\u0231\3\2\2\2\u0231\u0233\3\2\2\2\u0232"+ + "\u0230\3\2\2\2\u0233\u0234\7\4\2\2\u0234\u024d\3\2\2\2\u0235\u0237\7D"+ + "\2\2\u0236\u0235\3\2\2\2\u0236\u0237\3\2\2\2\u0237\u0238\3\2\2\2\u0238"+ + "\u0239\7\63\2\2\u0239\u023a\7\3\2\2\u023a\u023b\5\b\5\2\u023b\u023c\7"+ + "\4\2\2\u023c\u024d\3\2\2\2\u023d\u023f\7D\2\2\u023e\u023d\3\2\2\2\u023e"+ + "\u023f\3\2\2\2\u023f\u0240\3\2\2\2\u0240\u0241\7;\2\2\u0241\u024d\5@!"+ + "\2\u0242\u0244\7D\2\2\u0243\u0242\3\2\2\2\u0243\u0244\3\2\2\2\u0244\u0245"+ + "\3\2\2\2\u0245\u0246\7Q\2\2\u0246\u024d\5r:\2\u0247\u0249\7\67\2\2\u0248"+ + "\u024a\7D\2\2\u0249\u0248\3\2\2\2\u0249\u024a\3\2\2\2\u024a\u024b\3\2"+ + "\2\2\u024b\u024d\7E\2\2\u024c\u021f\3\2\2\2\u024c\u0227\3\2\2\2\u024c"+ + "\u0236\3\2\2\2\u024c\u023e\3\2\2\2\u024c\u0243\3\2\2\2\u024c\u0247\3\2"+ + "\2\2\u024d=\3\2\2\2\u024e\u024f\7;\2\2\u024f\u0250\5@!\2\u0250?\3\2\2"+ + "\2\u0251\u0253\5r:\2\u0252\u0254\5B\"\2\u0253\u0252\3\2\2\2\u0253\u0254"+ + "\3\2\2\2\u0254A\3\2\2\2\u0255\u0256\7!\2\2\u0256\u025c\5r:\2\u0257\u0258"+ + "\7h\2\2\u0258\u0259\5r:\2\u0259\u025a\7o\2\2\u025a\u025c\3\2\2\2\u025b"+ + "\u0255\3\2\2\2\u025b\u0257\3\2\2\2\u025cC\3\2\2\2\u025d\u025e\b#\1\2\u025e"+ + "\u0262\5F$\2\u025f\u0260\t\7\2\2\u0260\u0262\5D#\6\u0261\u025d\3\2\2\2"+ + "\u0261\u025f\3\2\2\2\u0262\u026f\3\2\2\2\u0263\u0264\f\5\2\2\u0264\u0265"+ + "\t\f\2\2\u0265\u026e\5D#\6\u0266\u0267\f\4\2\2\u0267\u0268\t\7\2\2\u0268"+ + "\u026e\5D#\5\u0269\u026a\f\3\2\2\u026a\u026b\5\\/\2\u026b\u026c\5D#\4"+ + "\u026c\u026e\3\2\2\2\u026d\u0263\3\2\2\2\u026d\u0266\3\2\2\2\u026d\u0269"+ + "\3\2\2\2\u026e\u0271\3\2\2\2\u026f\u026d\3\2\2\2\u026f\u0270\3\2\2\2\u0270"+ + "E\3\2\2\2\u0271\u026f\3\2\2\2\u0272\u0273\b$\1\2\u0273\u0297\5J&\2\u0274"+ + "\u0297\5P)\2\u0275\u0297\5H%\2\u0276\u0297\5Z.\2\u0277\u0278\5f\64\2\u0278"+ + "\u0279\7~\2\2\u0279\u027b\3\2\2\2\u027a\u0277\3\2\2\2\u027a\u027b\3\2"+ + "\2\2\u027b\u027c\3\2\2\2\u027c\u0297\7y\2\2\u027d\u0297\5T+\2\u027e\u027f"+ + "\7\3\2\2\u027f\u0280\5\b\5\2\u0280\u0281\7\4\2\2\u0281\u0297\3\2\2\2\u0282"+ + "\u0297\5f\64\2\u0283\u0284\7\3\2\2\u0284\u0285\5\64\33\2\u0285\u0286\7"+ + "\4\2\2\u0286\u0297\3\2\2\2\u0287\u0289\7\20\2\2\u0288\u028a\5\66\34\2"+ + "\u0289\u0288\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u028c\3\2\2\2\u028b\u028d"+ + "\5t;\2\u028c\u028b\3\2\2\2\u028d\u028e\3\2\2\2\u028e\u028c\3\2\2\2\u028e"+ + "\u028f\3\2\2\2\u028f\u0292\3\2\2\2\u0290\u0291\7\37\2\2\u0291\u0293\5"+ + "\66\34\2\u0292\u0290\3\2\2\2\u0292\u0293\3\2\2\2\u0293\u0294\3\2\2\2\u0294"+ + "\u0295\7 \2\2\u0295\u0297\3\2\2\2\u0296\u0272\3\2\2\2\u0296\u0274\3\2"+ + "\2\2\u0296\u0275\3\2\2\2\u0296\u0276\3\2\2\2\u0296\u027a\3\2\2\2\u0296"+ + "\u027d\3\2\2\2\u0296\u027e\3\2\2\2\u0296\u0282\3\2\2\2\u0296\u0283\3\2"+ + "\2\2\u0296\u0287\3\2\2\2\u0297\u029d\3\2\2\2\u0298\u0299\f\f\2\2\u0299"+ + "\u029a\7|\2\2\u029a\u029c\5d\63\2\u029b\u0298\3\2\2\2\u029c\u029f\3\2"+ + 
"\2\2\u029d\u029b\3\2\2\2\u029d\u029e\3\2\2\2\u029eG\3\2\2\2\u029f\u029d"+ + "\3\2\2\2\u02a0\u02a4\7\30\2\2\u02a1\u02a4\7\26\2\2\u02a2\u02a4\7\27\2"+ + "\2\u02a3\u02a0\3\2\2\2\u02a3\u02a1\3\2\2\2\u02a3\u02a2\3\2\2\2\u02a4I"+ + "\3\2\2\2\u02a5\u02b0\5L\'\2\u02a6\u02a7\7i\2\2\u02a7\u02a8\5L\'\2\u02a8"+ + "\u02a9\7o\2\2\u02a9\u02b0\3\2\2\2\u02aa\u02b0\5N(\2\u02ab\u02ac\7i\2\2"+ + "\u02ac\u02ad\5N(\2\u02ad\u02ae\7o\2\2\u02ae\u02b0\3\2\2\2\u02af\u02a5"+ + "\3\2\2\2\u02af\u02a6\3\2\2\2\u02af\u02aa\3\2\2\2\u02af\u02ab\3\2\2\2\u02b0"+ + "K\3\2\2\2\u02b1\u02b2\7\21\2\2\u02b2\u02b3\7\3\2\2\u02b3\u02b4\5\64\33"+ + "\2\u02b4\u02b5\7\f\2\2\u02b5\u02b6\5d\63\2\u02b6\u02b7\7\4\2\2\u02b7M"+ + "\3\2\2\2\u02b8\u02b9\7\25\2\2\u02b9\u02ba\7\3\2\2\u02ba\u02bb\5\64\33"+ + "\2\u02bb\u02bc\7\5\2\2\u02bc\u02bd\5d\63\2\u02bd\u02be\7\4\2\2\u02beO"+ + "\3\2\2\2\u02bf\u02c5\5R*\2\u02c0\u02c1\7i\2\2\u02c1\u02c2\5R*\2\u02c2"+ + "\u02c3\7o\2\2\u02c3\u02c5\3\2\2\2\u02c4\u02bf\3\2\2\2\u02c4\u02c0\3\2"+ + "\2\2\u02c5Q\3\2\2\2\u02c6\u02c7\7%\2\2\u02c7\u02c8\7\3\2\2\u02c8\u02c9"+ + "\5h\65\2\u02c9\u02ca\7*\2\2\u02ca\u02cb\5D#\2\u02cb\u02cc\7\4\2\2\u02cc"+ + "S\3\2\2\2\u02cd\u02d3\5V,\2\u02ce\u02cf\7i\2\2\u02cf\u02d0\5V,\2\u02d0"+ + "\u02d1\7o\2\2\u02d1\u02d3\3\2\2\2\u02d2\u02cd\3\2\2\2\u02d2\u02ce\3\2"+ + "\2\2\u02d3U\3\2\2\2\u02d4\u02d5\5X-\2\u02d5\u02e1\7\3\2\2\u02d6\u02d8"+ + "\5\36\20\2\u02d7\u02d6\3\2\2\2\u02d7\u02d8\3\2\2\2\u02d8\u02d9\3\2\2\2"+ + "\u02d9\u02de\5\64\33\2\u02da\u02db\7\5\2\2\u02db\u02dd\5\64\33\2\u02dc"+ + "\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de\u02df\3\2"+ + "\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2\u02e1\u02d7\3\2\2\2\u02e1"+ + "\u02e2\3\2\2\2\u02e2\u02e3\3\2\2\2\u02e3\u02e4\7\4\2\2\u02e4W\3\2\2\2"+ + "\u02e5\u02e9\7:\2\2\u02e6\u02e9\7P\2\2\u02e7\u02e9\5h\65\2\u02e8\u02e5"+ + "\3\2\2\2\u02e8\u02e6\3\2\2\2\u02e8\u02e7\3\2\2\2\u02e9Y\3\2\2\2\u02ea"+ + "\u0305\7E\2\2\u02eb\u0305\5`\61\2\u02ec\u0305\5p9\2\u02ed\u0305\5^\60"+ + "\2\u02ee\u02f0\7\u0080\2\2\u02ef\u02ee\3\2\2\2\u02f0\u02f1\3\2\2\2\u02f1"+ + "\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2\u0305\3\2\2\2\u02f3\u0305\7\177"+ + "\2\2\u02f4\u02f5\7k\2\2\u02f5\u02f6\5r:\2\u02f6\u02f7\7o\2\2\u02f7\u0305"+ + "\3\2\2\2\u02f8\u02f9\7l\2\2\u02f9\u02fa\5r:\2\u02fa\u02fb\7o\2\2\u02fb"+ + "\u0305\3\2\2\2\u02fc\u02fd\7m\2\2\u02fd\u02fe\5r:\2\u02fe\u02ff\7o\2\2"+ + "\u02ff\u0305\3\2\2\2\u0300\u0301\7n\2\2\u0301\u0302\5r:\2\u0302\u0303"+ + "\7o\2\2\u0303\u0305\3\2\2\2\u0304\u02ea\3\2\2\2\u0304\u02eb\3\2\2\2\u0304"+ + "\u02ec\3\2\2\2\u0304\u02ed\3\2\2\2\u0304\u02ef\3\2\2\2\u0304\u02f3\3\2"+ + "\2\2\u0304\u02f4\3\2\2\2\u0304\u02f8\3\2\2\2\u0304\u02fc\3\2\2\2\u0304"+ + "\u0300\3\2\2\2\u0305[\3\2\2\2\u0306\u0307\t\r\2\2\u0307]\3\2\2\2\u0308"+ + "\u0309\t\16\2\2\u0309_\3\2\2\2\u030a\u030c\7\66\2\2\u030b\u030d\t\7\2"+ + "\2\u030c\u030b\3\2\2\2\u030c\u030d\3\2\2\2\u030d\u0310\3\2\2\2\u030e\u0311"+ + "\5p9\2\u030f\u0311\5r:\2\u0310\u030e\3\2\2\2\u0310\u030f\3\2\2\2\u0311"+ + "\u0312\3\2\2\2\u0312\u0315\5b\62\2\u0313\u0314\7^\2\2\u0314\u0316\5b\62"+ + "\2\u0315\u0313\3\2\2\2\u0315\u0316\3\2\2\2\u0316a\3\2\2\2\u0317\u0318"+ + "\t\17\2\2\u0318c\3\2\2\2\u0319\u031a\5h\65\2\u031ae\3\2\2\2\u031b\u031c"+ + "\5h\65\2\u031c\u031d\7~\2\2\u031d\u031f\3\2\2\2\u031e\u031b\3\2\2\2\u031f"+ + "\u0322\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u0323\3\2"+ + "\2\2\u0322\u0320\3\2\2\2\u0323\u0324\5h\65\2\u0324g\3\2\2\2\u0325\u0328"+ + "\5l\67\2\u0326\u0328\5n8\2\u0327\u0325\3\2\2\2\u0327\u0326\3\2\2\2\u0328"+ + 
"i\3\2\2\2\u0329\u032a\5h\65\2\u032a\u032b\7\6\2\2\u032b\u032d\3\2\2\2"+ + "\u032c\u0329\3\2\2\2\u032c\u032d\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0336"+ + "\7\u0085\2\2\u032f\u0330\5h\65\2\u0330\u0331\7\6\2\2\u0331\u0333\3\2\2"+ + "\2\u0332\u032f\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u0334\3\2\2\2\u0334\u0336"+ + "\5h\65\2\u0335\u032c\3\2\2\2\u0335\u0332\3\2\2\2\u0336k\3\2\2\2\u0337"+ + "\u033a\7\u0086\2\2\u0338\u033a\7\u0087\2\2\u0339\u0337\3\2\2\2\u0339\u0338"+ + "\3\2\2\2\u033am\3\2\2\2\u033b\u033f\7\u0083\2\2\u033c\u033f\5v<\2\u033d"+ + "\u033f\7\u0084\2\2\u033e\u033b\3\2\2\2\u033e\u033c\3\2\2\2\u033e\u033d"+ + "\3\2\2\2\u033fo\3\2\2\2\u0340\u0343\7\u0082\2\2\u0341\u0343\7\u0081\2"+ + "\2\u0342\u0340\3\2\2\2\u0342\u0341\3\2\2\2\u0343q\3\2\2\2\u0344\u0345"+ + "\t\20\2\2\u0345s\3\2\2\2\u0346\u0347\7c\2\2\u0347\u0348\5\64\33\2\u0348"+ + "\u0349\7\\\2\2\u0349\u034a\5\64\33\2\u034au\3\2\2\2\u034b\u034c\t\21\2"+ + "\2\u034cw\3\2\2\2s\u0087\u0089\u008d\u0096\u0098\u009c\u00a3\u00a7\u00ad"+ + "\u00b2\u00b7\u00bb\u00c0\u00c8\u00cc\u00d4\u00d7\u00dd\u00e2\u00e5\u00ea"+ + "\u00ed\u00ef\u00f7\u00fa\u0106\u0109\u010c\u0113\u011a\u011e\u0122\u0126"+ + "\u012a\u012e\u0133\u0137\u013f\u0143\u0146\u014d\u0158\u015b\u015f\u016e"+ + "\u0173\u0176\u017c\u0183\u018a\u018d\u0191\u0195\u0199\u019b\u01a6\u01ab"+ + "\u01ae\u01b2\u01b5\u01bb\u01be\u01c4\u01c7\u01c9\u01db\u01e0\u01e3\u0206"+ + "\u020e\u0210\u0217\u021c\u021f\u0227\u0230\u0236\u023e\u0243\u0249\u024c"+ + "\u0253\u025b\u0261\u026d\u026f\u027a\u0289\u028e\u0292\u0296\u029d\u02a3"+ + "\u02af\u02c4\u02d2\u02d7\u02de\u02e1\u02e8\u02f1\u0304\u030c\u0310\u0315"+ + "\u0320\u0327\u032c\u0332\u0335\u0339\u033e\u0342"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 7f44a1593c2..bc8d06c1dcc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -173,6 +173,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#selectItems}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSelectItems(SqlBaseParser.SelectItemsContext ctx); /** * Visit a parse tree produced by the {@code selectExpression} * labeled alternative in {@link SqlBaseParser#selectItem}. @@ -225,6 +231,24 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#namedValueExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx); /** * Visit a parse tree produced by {@link SqlBaseParser#expression}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java index 35d93e3a68c..39fef8188b2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java @@ -10,8 +10,8 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java new file mode 100644 index 00000000000..4a0639d8b78 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.plan.logical; + +import org.elasticsearch.xpack.sql.capabilities.Resolvables; +import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.expression.AttributeSet; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.function.Function; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.singletonList; + +public class Pivot extends UnaryPlan { + + private final Expression column; + private final List<NamedExpression> values; + private final List<NamedExpression> aggregates; + // derived properties + private AttributeSet groupingSet; + private AttributeSet valueOutput; + private List<Attribute> output; + + public Pivot(Source source, LogicalPlan child, Expression column, List<NamedExpression> values, List<NamedExpression> aggregates) { + super(source, child); + this.column = column; + this.values = values; + this.aggregates = aggregates; + } + + @Override + protected NodeInfo<Pivot> info() { + return NodeInfo.create(this, Pivot::new, child(), column, values, aggregates); + } + + @Override + protected Pivot replaceChild(LogicalPlan newChild) { + return new Pivot(source(), newChild, column, values, aggregates); + } + + public Expression column() { + return column; + } + + public List<NamedExpression> values() { + return values; + } + + public List<NamedExpression> aggregates() { + return aggregates; + } + + public AttributeSet groupingSet() { + if (groupingSet == null) { + AttributeSet columnSet = Expressions.references(singletonList(column)); + // grouping can happen only on "primitive" fields, thus exclude multi-fields or nested docs + // the verifier enforces this rule so it does not catch folks by surprise + groupingSet = new
AttributeSet(Expressions.onlyPrimitiveFieldAttributes(child().output())) + // make sure to have the column as the last entry (helps with translation) + .subtract(columnSet) + .subtract(Expressions.references(aggregates)) + .combine(columnSet); + } + return groupingSet; + } + + public AttributeSet valuesOutput() { + // TODO: the generated id is a hack since it can clash with other potentially generated ids + if (valueOutput == null) { + List<Attribute> out = new ArrayList<>(aggregates.size() * values.size()); + if (aggregates.size() == 1) { + NamedExpression agg = aggregates.get(0); + for (NamedExpression value : values) { + ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode()); + out.add(value.toAttribute().withDataType(agg.dataType()).withId(id)); + } + } + // for multiple args, concat the function and the value + else { + for (NamedExpression agg : aggregates) { + String name = agg instanceof Function ? ((Function) agg).functionName() : agg.name(); + for (NamedExpression value : values) { + ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode()); + out.add(value.toAttribute().withName(value.name() + "_" + name).withDataType(agg.dataType()).withId(id)); + } + } + } + valueOutput = new AttributeSet(out); + } + return valueOutput; + } + + @Override + public List<Attribute> output() { + if (output == null) { + output = new ArrayList<>(groupingSet() + .subtract(Expressions.references(singletonList(column))) + .combine(valuesOutput())); + } + + return output; + } + + @Override + public boolean expressionsResolved() { + return column.resolved() && Resolvables.resolved(values) && Resolvables.resolved(aggregates); + } + + @Override + public int hashCode() { + return Objects.hash(column, values, aggregates, child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Pivot other = (Pivot) obj; + return Objects.equals(column, other.column) + && Objects.equals(values, other.values) + && Objects.equals(aggregates, other.aggregates) + && Objects.equals(child(), other.child()); + } +} \ No newline at end of file
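As a usage illustration for the Pivot node above (the query is an example against the test employees index, not part of this patch): a statement such as

SELECT * FROM (SELECT languages, gender, salary FROM emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F'))

resolves with column pointing at the gender attribute, values holding the literals 'M' and 'F', and aggregates holding AVG(salary). With a single aggregate, valuesOutput() yields one column per value, typed after the aggregate; with several aggregates, say AVG(salary) and SUM(salary), the multi-aggregate branch instead derives names by concatenating value and function name, along the lines of 'M'_AVG and 'M'_SUM.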
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java new file mode 100644 index 00000000000..579a53696ee --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.plan.physical; + +import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.List; +import java.util.Objects; + +public class PivotExec extends UnaryExec implements Unexecutable { + + private final Pivot pivot; + + public PivotExec(Source source, PhysicalPlan child, Pivot pivot) { + super(source, child); + this.pivot = pivot; + } + + @Override + protected NodeInfo<PivotExec> info() { + return NodeInfo.create(this, PivotExec::new, child(), pivot); + } + + @Override + protected PivotExec replaceChild(PhysicalPlan newChild) { + return new PivotExec(source(), newChild, pivot); + } + + @Override + public List<Attribute> output() { + return pivot.output(); + } + + public Pivot pivot() { + return pivot; + } + + @Override + public int hashCode() { + return Objects.hash(pivot, child()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + PivotExec other = (PivotExec) obj; + + return Objects.equals(pivot, other.pivot) + && Objects.equals(child(), other.child()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java index b32ad961ae9..522d5a944dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.With; import org.elasticsearch.xpack.sql.plan.logical.command.Command; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.sql.plan.physical.LimitExec; import org.elasticsearch.xpack.sql.plan.physical.LocalExec; import org.elasticsearch.xpack.sql.plan.physical.OrderExec; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.sql.plan.physical.PivotExec; import org.elasticsearch.xpack.sql.plan.physical.ProjectExec; import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; @@ -88,6 +90,11 @@ class Mapper extends RuleExecutor<PhysicalPlan> { return new AggregateExec(p.source(), map(a.child()), a.groupings(), a.aggregates()); } + if (p instanceof Pivot) { + Pivot pv = (Pivot) p; + return new PivotExec(pv.source(), map(pv.child()), pv); + } + if (p instanceof EsRelation) { EsRelation c = (EsRelation) p; List<Attribute> output = c.output(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index ae875d6fc6e..3931ada3836 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.sql.planner; import org.elasticsearch.common.collect.Tuple; import
org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.AggRef; +import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeMap; +import org.elasticsearch.xpack.sql.expression.AttributeSet; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.NamedExpression; @@ -32,6 +35,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.physical.AggregateExec; import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.sql.plan.physical.FilterExec; @@ -39,6 +43,7 @@ import org.elasticsearch.xpack.sql.plan.physical.LimitExec; import org.elasticsearch.xpack.sql.plan.physical.LocalExec; import org.elasticsearch.xpack.sql.plan.physical.OrderExec; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.sql.plan.physical.PivotExec; import org.elasticsearch.xpack.sql.plan.physical.ProjectExec; import org.elasticsearch.xpack.sql.planner.QueryTranslator.GroupingContext; import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation; @@ -52,6 +57,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property; import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef; +import org.elasticsearch.xpack.sql.querydsl.container.PivotColumnRef; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScoreSort; import org.elasticsearch.xpack.sql.querydsl.container.ScriptSort; @@ -64,14 +70,17 @@ import org.elasticsearch.xpack.sql.rule.RuleExecutor; import org.elasticsearch.xpack.sql.session.EmptyExecutable; import org.elasticsearch.xpack.sql.util.Check; +import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.and; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toAgg; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toQuery; +import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; /** * Folds the PhysicalPlan into a {@link Query}. 
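The first batch registered below gains a new FoldPivot rule; its definition sits further down in this file, outside this excerpt. A minimal sketch of the idea, reusing the FoldAggregate.fold(...) extraction introduced by the next hunk (a hypothetical simplification, not the patch's actual rule — the real one also rewires the pivoted column extractions via the PivotColumnRef import added above):

private static class FoldPivot extends FoldingRule<PivotExec> {
    @Override
    protected PhysicalPlan rule(PivotExec plan) {
        if (plan.child() instanceof EsQueryExec) {
            EsQueryExec exec = (EsQueryExec) plan.child();
            Pivot p = plan.pivot();
            // fold the pivot as a regular aggregation over the pivot grouping set
            // (which, by construction in Pivot.groupingSet(), ends with the pivot column)
            return FoldAggregate.fold(
                    new AggregateExec(plan.source(), exec, new ArrayList<>(p.groupingSet()), p.aggregates()),
                    exec);
        }
        return plan;
    }
}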
@@ -85,6 +94,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> { @Override protected Iterable<RuleExecutor<PhysicalPlan>.Batch> batches() { Batch rollup = new Batch("Fold queries", + new FoldPivot(), new FoldAggregate(), new FoldProject(), new FoldFilter(), @@ -149,7 +159,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> { queryC.sort(), queryC.limit(), queryC.shouldTrackHits(), - queryC.shouldIncludeFrozen()); + queryC.shouldIncludeFrozen(), + queryC.minPageSize()); return new EsQueryExec(exec.source(), exec.index(), project.output(), clone); } return project; @@ -179,7 +190,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> { qContainer.sort(), qContainer.limit(), qContainer.shouldTrackHits(), - qContainer.shouldIncludeFrozen()); + qContainer.shouldIncludeFrozen(), + qContainer.minPageSize()); return exec.with(qContainer); } @@ -204,190 +216,190 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> { private static class FoldAggregate extends FoldingRule<AggregateExec> { @Override protected PhysicalPlan rule(AggregateExec a) { - if (a.child() instanceof EsQueryExec) { EsQueryExec exec = (EsQueryExec) a.child(); + return fold(a, exec); + } + return a; + } + + static EsQueryExec fold(AggregateExec a, EsQueryExec exec) { + // build the group aggregation + // and also collect info about it (since the group columns might be used inside the select) - // build the group aggregation - // and also collect info about it (since the group columns might be used inside the select) + GroupingContext groupingContext = QueryTranslator.groupBy(a.groupings()); - GroupingContext groupingContext = QueryTranslator.groupBy(a.groupings()); + QueryContainer queryC = exec.queryContainer(); + if (groupingContext != null) { + queryC = queryC.addGroups(groupingContext.groupMap.values()); + } - QueryContainer queryC = exec.queryContainer(); - if (groupingContext != null) { - queryC = queryC.addGroups(groupingContext.groupMap.values()); - } + Map<Attribute, Attribute> aliases = new LinkedHashMap<>(); + // tracker for compound aggs seen in a group + Map<CompoundNumericAggregate, String> compoundAggMap = new LinkedHashMap<>(); - Map<Attribute, Attribute> aliases = new LinkedHashMap<>(); - // tracker for compound aggs seen in a group - Map<CompoundNumericAggregate, String> compoundAggMap = new LinkedHashMap<>(); + // followed by actual aggregates + for (NamedExpression ne : a.aggregates()) { - // followed by actual aggregates - for (NamedExpression ne : a.aggregates()) { + // unwrap alias - it can be + // - an attribute (since we support aliases inside group-by) + // SELECT emp_no ... GROUP BY emp_no + // SELECT YEAR(hire_date) ... GROUP BY YEAR(hire_date) - // unwrap alias - it can be - // - an attribute (since we support aliases inside group-by) - // SELECT emp_no ... GROUP BY emp_no - // SELECT YEAR(hire_date) ... GROUP BY YEAR(hire_date) + // - an agg function (typically) + // SELECT COUNT(*), AVG(salary) ... GROUP BY salary; - // - an agg function (typically) - // SELECT COUNT(*), AVG(salary) ... GROUP BY salary; + // - a scalar function, which can be applied on an attribute or aggregate and can require one or multiple inputs - // - a scalar function, which can be applied on an attribute or aggregate and can require one or multiple inputs + // SELECT SIN(emp_no) ... GROUP BY emp_no + // SELECT CAST(YEAR(hire_date)) ... GROUP BY YEAR(hire_date) + // SELECT CAST(AVG(salary)) ... GROUP BY salary + // SELECT AVG(salary) + SIN(MIN(salary)) ... GROUP BY salary - // SELECT SIN(emp_no) ... GROUP BY emp_no - // SELECT CAST(YEAR(hire_date)) ... GROUP BY YEAR(hire_date) - // SELECT CAST(AVG(salary)) ... GROUP BY salary - // SELECT AVG(salary) + SIN(MIN(salary)) ...
GROUP BY salary + if (ne instanceof Alias || ne instanceof Function) { + Alias as = ne instanceof Alias ? (Alias) ne : null; + Expression child = as != null ? as.child() : ne; - if (ne instanceof Alias || ne instanceof Function) { - Alias as = ne instanceof Alias ? (Alias) ne : null; - Expression child = as != null ? as.child() : ne; + // record aliases in case they are later referred in the tree + if (as != null && as.child() instanceof NamedExpression) { + aliases.put(as.toAttribute(), ((NamedExpression) as.child()).toAttribute()); + } - // record aliases in case they are later referred in the tree - if (as != null && as.child() instanceof NamedExpression) { - aliases.put(as.toAttribute(), ((NamedExpression) as.child()).toAttribute()); - } + // + // look first for scalar functions which might wrap the actual grouped target + // (e.g. + // CAST(field) GROUP BY field or + // ABS(YEAR(field)) GROUP BY YEAR(field) or + // ABS(AVG(salary)) ... GROUP BY salary + // ) + if (child instanceof ScalarFunction) { + ScalarFunction f = (ScalarFunction) child; + Pipe proc = f.asPipe(); - // - // look first for scalar functions which might wrap the actual grouped target - // (e.g. - // CAST(field) GROUP BY field or - // ABS(YEAR(field)) GROUP BY YEAR(field) or - // ABS(AVG(salary)) ... GROUP BY salary - // ) - if (child instanceof ScalarFunction) { - ScalarFunction f = (ScalarFunction) child; - Pipe proc = f.asPipe(); + final AtomicReference qC = new AtomicReference<>(queryC); - final AtomicReference qC = new AtomicReference<>(queryC); - - proc = proc.transformUp(p -> { - // bail out if the def is resolved - if (p.resolved()) { - return p; - } - - // get the backing expression and check if it belongs to a agg group or whether it's - // an expression in the first place - Expression exp = p.expression(); - GroupByKey matchingGroup = null; - if (groupingContext != null) { - // is there a group (aggregation) for this expression ? 
- matchingGroup = groupingContext.groupFor(exp); - } - else { - // a scalar function can be used only if has already been mentioned for grouping - // (otherwise it is the opposite of grouping) - if (exp instanceof ScalarFunction) { - throw new FoldingException(exp, "Scalar function " +exp.toString() - + " can be used only if included already in grouping"); - } - } - - // found match for expression; if it's an attribute or scalar, end the processing chain with - // the reference to the backing agg - if (matchingGroup != null) { - if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) { - Processor action = null; - boolean isDateBased = exp.dataType().isDateBased(); - /* - * special handling of dates since aggs return the typed Date object which needs - * extraction instead of handling this in the scroller, the folder handles this - * as it already got access to the extraction action - */ - if (exp instanceof DateTimeHistogramFunction) { - action = ((UnaryPipe) p).action(); - isDateBased = true; - } - return new AggPathInput(exp.source(), exp, - new GroupByRef(matchingGroup.id(), null, isDateBased), action); - } - } - // or found an aggregate expression (which has to work on an attribute used for grouping) - // (can happen when dealing with a root group) - if (Functions.isAggregate(exp)) { - Tuple withFunction = addAggFunction(matchingGroup, - (AggregateFunction) exp, compoundAggMap, qC.get()); - qC.set(withFunction.v1()); - return withFunction.v2(); - } - // not an aggregate and no matching - go to a higher node (likely a function YEAR(birth_date)) + proc = proc.transformUp(p -> { + // bail out if the def is resolved + if (p.resolved()) { return p; - }); - - if (!proc.resolved()) { - throw new FoldingException(child, "Cannot find grouping for '{}'", Expressions.name(child)); } - // add the computed column - queryC = qC.get().addColumn(new ComputedRef(proc), f.toAttribute()); - - // TODO: is this needed? - // redirect the alias to the scalar group id (changing the id altogether doesn't work it is - // already used in the aggpath) - //aliases.put(as.toAttribute(), sf.toAttribute()); - } - // apply the same logic above (for function inputs) to non-scalar functions with small variations: - // instead of adding things as input, add them as full blown column - else { + // get the backing expression and check if it belongs to a agg group or whether it's + // an expression in the first place + Expression exp = p.expression(); GroupByKey matchingGroup = null; if (groupingContext != null) { // is there a group (aggregation) for this expression ? 
- matchingGroup = groupingContext.groupFor(child); + matchingGroup = groupingContext.groupFor(exp); + } else { + // a scalar function can be used only if has already been mentioned for grouping + // (otherwise it is the opposite of grouping) + if (exp instanceof ScalarFunction) { + throw new FoldingException(exp, + "Scalar function " + exp.toString() + " can be used only if included already in grouping"); + } } - // attributes can only refer to declared groups - if (child instanceof Attribute) { - Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(child)); - queryC = queryC.addColumn( - new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), ((Attribute) child)); + + // found match for expression; if it's an attribute or scalar, end the processing chain with + // the reference to the backing agg + if (matchingGroup != null) { + if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) { + Processor action = null; + boolean isDateBased = exp.dataType().isDateBased(); + /* + * special handling of dates since aggs return the typed Date object which needs + * extraction instead of handling this in the scroller, the folder handles this + * as it already got access to the extraction action + */ + if (exp instanceof DateTimeHistogramFunction) { + action = ((UnaryPipe) p).action(); + isDateBased = true; + } + return new AggPathInput(exp.source(), exp, new GroupByRef(matchingGroup.id(), null, isDateBased), + action); + } } - // handle histogram - else if (child instanceof GroupingFunction) { - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), - ((GroupingFunction) child).toAttribute()); + // or found an aggregate expression (which has to work on an attribute used for grouping) + // (can happen when dealing with a root group) + if (Functions.isAggregate(exp)) { + Tuple withFunction = addAggFunction(matchingGroup, (AggregateFunction) exp, + compoundAggMap, qC.get()); + qC.set(withFunction.v1()); + return withFunction.v2(); } + // not an aggregate and no matching - go to a higher node (likely a function YEAR(birth_date)) + return p; + }); + + if (!proc.resolved()) { + throw new FoldingException(child, "Cannot find grouping for '{}'", Expressions.name(child)); + } + + // add the computed column + queryC = qC.get().addColumn(new ComputedRef(proc), f.toAttribute()); + + // TODO: is this needed? + // redirect the alias to the scalar group id (changing the id altogether doesn't work it is + // already used in the aggpath) + //aliases.put(as.toAttribute(), sf.toAttribute()); + } + // apply the same logic above (for function inputs) to non-scalar functions with small variations: + // instead of adding things as input, add them as full blown column + else { + GroupByKey matchingGroup = null; + if (groupingContext != null) { + // is there a group (aggregation) for this expression ? 
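    // Illustrative example (annotation, not from the original sources): given
    //   SELECT gender, AVG(salary) FROM emp GROUP BY gender
    // the bare attribute `gender` must resolve to the GROUP BY key declared above,
    // while AVG(salary) falls through to addAggFunction further down.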
+ matchingGroup = groupingContext.groupFor(child); + } + // attributes can only refer to declared groups + if (child instanceof Attribute) { + Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(child)); + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), + ((Attribute) child)); + } + // handle histogram + else if (child instanceof GroupingFunction) { + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), + ((GroupingFunction) child).toAttribute()); + } else if (child.foldable()) { queryC = queryC.addColumn(ne.toAttribute()); } - // fallback to regular agg functions - else { - // the only thing left is agg function - Check.isTrue(Functions.isAggregate(child), - "Expected aggregate function inside alias; got [{}]", child.nodeString()); - AggregateFunction af = (AggregateFunction) child; - Tuple withAgg = addAggFunction(matchingGroup, af, compoundAggMap, queryC); - // make sure to add the inner id (to handle compound aggs) - queryC = withAgg.v1().addColumn(withAgg.v2().context(), af.toAttribute()); - } + // fallback to regular agg functions + else { + // the only thing left is agg function + Check.isTrue(Functions.isAggregate(child), "Expected aggregate function inside alias; got [{}]", + child.nodeString()); + AggregateFunction af = (AggregateFunction) child; + Tuple withAgg = addAggFunction(matchingGroup, af, compoundAggMap, queryC); + // make sure to add the inner id (to handle compound aggs) + queryC = withAgg.v1().addColumn(withAgg.v2().context(), af.toAttribute()); } + } // not an Alias or Function means it's an Attribute so apply the same logic as above - } else { - GroupByKey matchingGroup = null; - if (groupingContext != null) { - matchingGroup = groupingContext.groupFor(ne); - Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne)); + } else { + GroupByKey matchingGroup = null; + if (groupingContext != null) { + matchingGroup = groupingContext.groupFor(ne); + Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne)); - queryC = queryC.addColumn( - new GroupByRef(matchingGroup.id(), null, ne.dataType().isDateBased()), ne.toAttribute()); - } + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, ne.dataType().isDateBased()), ne.toAttribute()); + } else if (ne.foldable()) { queryC = queryC.addColumn(ne.toAttribute()); } } } - if (!aliases.isEmpty()) { - Map newAliases = new LinkedHashMap<>(queryC.aliases()); - newAliases.putAll(aliases); - queryC = queryC.withAliases(new AttributeMap<>(newAliases)); - } - return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC); + if (!aliases.isEmpty()) { + Map newAliases = new LinkedHashMap<>(queryC.aliases()); + newAliases.putAll(aliases); + queryC = queryC.withAliases(new AttributeMap<>(newAliases)); } - return a; + return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC); } - private Tuple addAggFunction(GroupByKey groupingAgg, AggregateFunction f, + private static Tuple addAggFunction(GroupByKey groupingAgg, AggregateFunction f, Map compoundAggMap, QueryContainer queryC) { String functionId = f.functionId(); // handle count as a special case agg @@ -551,6 +563,52 @@ class QueryFolder extends RuleExecutor { } } + + private static class FoldPivot extends FoldingRule { + + @Override + protected PhysicalPlan rule(PivotExec plan) { + if (plan.child() instanceof EsQueryExec) { + EsQueryExec exec = (EsQueryExec) plan.child(); + Pivot p = plan.pivot(); + 
EsQueryExec fold = FoldAggregate + .fold(new AggregateExec(plan.source(), exec, + new ArrayList<>(p.groupingSet()), combine(p.groupingSet(), p.aggregates())), exec); + + // replace the aggregate extractors with pivot specific extractors + // these require a reference to the pivoting column in order to compare the value + // due to the Pivot structure - the column is the last entry in the grouping set + QueryContainer query = fold.queryContainer(); + + List> fields = new ArrayList<>(query.fields()); + int startingIndex = fields.size() - p.aggregates().size() - 1; + // pivot grouping + Tuple groupTuple = fields.remove(startingIndex); + AttributeSet valuesOutput = plan.pivot().valuesOutput(); + + for (int i = startingIndex; i < fields.size(); i++) { + Tuple tuple = fields.remove(i); + for (Attribute attribute : valuesOutput) { + fields.add(new Tuple<>(new PivotColumnRef(groupTuple.v1(), tuple.v1(), attribute.fold()), attribute.id())); + } + i += valuesOutput.size(); + } + + return fold.with(new QueryContainer(query.query(), query.aggs(), + fields, + query.aliases(), + query.pseudoFunctions(), + query.scalarFunctions(), + query.sort(), + query.limit(), + query.shouldTrackHits(), + query.shouldIncludeFrozen(), + valuesOutput.size())); + } + return plan; + } + } + // // local // diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java index 1e527657ae0..fe4ec05ab33 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.xpack.sql.expression.function.aggregate.InnerAggregate; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.sql.plan.physical.PivotExec; import org.elasticsearch.xpack.sql.plan.physical.Unexecutable; import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec; import org.elasticsearch.xpack.sql.tree.Node; @@ -14,6 +16,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; + abstract class Verifier { static class Failure { @@ -53,8 +57,8 @@ abstract class Verifier { } } - private static Failure fail(Node source, String message) { - return new Failure(source, message); + private static Failure fail(Node source, String message, Object... 
args) { + return new Failure(source, format(null, message, args)); } static List verifyMappingPlan(PhysicalPlan plan) { @@ -70,10 +74,22 @@ abstract class Verifier { } }); }); + // verify Pivot + checkInnerAggsPivot(plan, failures); return failures; } + private static void checkInnerAggsPivot(PhysicalPlan plan, List failures) { + plan.forEachDown(p -> { + p.pivot().aggregates().forEach(agg -> agg.forEachDown(e -> { + if (e instanceof InnerAggregate) { + failures.add(fail(e, "Aggregation [{}] not supported (yet) by PIVOT", e.sourceText())); + } + })); + }, PivotExec.class); + } + static List verifyExecutingPlan(PhysicalPlan plan) { List failures = new ArrayList<>(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java new file mode 100644 index 00000000000..60ee3b7409c --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.querydsl.container; + +import org.elasticsearch.xpack.sql.execution.search.AggRef; +import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; + +public class PivotColumnRef extends AggRef { + + private final FieldExtraction agg; + private final FieldExtraction pivot; + private final Object value; + + public PivotColumnRef(FieldExtraction pivot, FieldExtraction agg, Object value) { + this.pivot = pivot; + this.agg = agg; + // due to the way Elasticsearch aggs work + // promote the object to expect types so that the comparison works + this.value = esAggType(value); + } + + private static Object esAggType(Object value) { + if (value instanceof Number) { + Number n = (Number) value; + if (value instanceof Double) { + return value; + } + if (value instanceof Float) { + return Double.valueOf(n.doubleValue()); + } + return Long.valueOf(n.longValue()); + } + return value; + } + + public FieldExtraction pivot() { + return pivot; + } + + public FieldExtraction agg() { + return agg; + } + + public Object value() { + return value; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 5ff560f4baa..c75a2008202 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -83,13 +83,15 @@ public class QueryContainer { private final int limit; private final boolean trackHits; private final boolean includeFrozen; + // used when pivoting for retrieving at least one pivot row + private final int minPageSize; // computed private Boolean aggsOnly; private Boolean customSort; public QueryContainer() { - this(null, null, null, null, null, null, null, -1, false, false); + this(null, null, null, null, null, null, null, -1, false, false, -1); } public QueryContainer(Query query, @@ -102,7 +104,8 @@ public class QueryContainer { Set sort, int limit, boolean trackHits, - boolean includeFrozen) { + boolean includeFrozen, + int minPageSize) { 
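    // Illustrative note: -1 disables the minimum (see the no-arg constructor above), while
    // FoldPivot passes valuesOutput.size() so a page returns at least one complete pivot row.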
this.query = query; this.aggs = aggs == null ? Aggs.EMPTY : aggs; this.fields = fields == null || fields.isEmpty() ? emptyList() : fields; @@ -113,6 +116,7 @@ public class QueryContainer { this.limit = limit; this.trackHits = trackHits; this.includeFrozen = includeFrozen; + this.minPageSize = minPageSize; } /** @@ -247,49 +251,62 @@ public class QueryContainer { return includeFrozen; } + public int minPageSize() { + return minPageSize; + } + // // copy methods // public QueryContainer with(Query q) { - return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); + return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize); + } + + public QueryContainer withFields(List> f) { + return new QueryContainer(query, aggs, f, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize); } public QueryContainer withAliases(AttributeMap a) { - return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); + return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize); } public QueryContainer withPseudoFunctions(Map p) { - return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits, includeFrozen); + return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits, includeFrozen, minPageSize); } public QueryContainer with(Aggs a) { - return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); + return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize); } public QueryContainer withLimit(int l) { return l == limit ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, l, trackHits, - includeFrozen); + includeFrozen, minPageSize); } public QueryContainer withTrackHits() { return trackHits ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, true, - includeFrozen); + includeFrozen, minPageSize); } public QueryContainer withFrozen() { return includeFrozen ? 
this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, - trackHits, true); + trackHits, true, minPageSize); } public QueryContainer withScalarProcessors(AttributeMap procs) { - return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen); + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen, minPageSize); } public QueryContainer addSort(Sort sortable) { Set sort = new LinkedHashSet<>(this.sort); sort.add(sortable); - return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize); } private String aliasName(Attribute attr) { @@ -344,7 +361,8 @@ public class QueryContainer { false, attr.parent().name()); return new Tuple<>( - new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen), + new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen, + minPageSize), nestedFieldRef); } @@ -447,7 +465,7 @@ public class QueryContainer { ExpressionId id = attr instanceof AggregateFunctionAttribute ? ((AggregateFunctionAttribute) attr).innerId() : attr.id(); return new QueryContainer(query, aggs, combine(fields, new Tuple<>(ref, id)), aliases, pseudoFunctions, scalarFunctions, - sort, limit, trackHits, includeFrozen); + sort, limit, trackHits, includeFrozen, minPageSize); } public AttributeMap scalarFunctions() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java index 8f2c3735602..6f1ee47f4da 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.common.io.SqlStreamInput; import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput; -import org.elasticsearch.xpack.sql.execution.search.CompositeAggregationCursor; +import org.elasticsearch.xpack.sql.execution.search.CompositeAggCursor; +import org.elasticsearch.xpack.sql.execution.search.PivotCursor; import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractors; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors; @@ -45,7 +46,8 @@ public final class Cursors { // cursors entries.add(new NamedWriteableRegistry.Entry(Cursor.class, EmptyCursor.NAME, in -> Cursor.EMPTY)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ScrollCursor.NAME, ScrollCursor::new)); - entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggregationCursor.NAME, CompositeAggregationCursor::new)); + entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggCursor.NAME, CompositeAggCursor::new)); + entries.add(new NamedWriteableRegistry.Entry(Cursor.class, PivotCursor.NAME, PivotCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, TextFormatterCursor.NAME, TextFormatterCursor::new)); entries.add(new 
NamedWriteableRegistry.Entry(Cursor.class, ListCursor.NAME, ListCursor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java index 7e20abc31de..a07b7adfe37 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java @@ -21,7 +21,7 @@ import static java.util.Collections.emptyList; public class ListCursor implements Cursor { - public static final String NAME = "p"; + public static final String NAME = "l"; private final List> data; private final int columnCount; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 8844301006f..b4068932bf0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -844,4 +844,57 @@ public class VerifierErrorMessagesTests extends ESTestCase { accept("SELECT ST_X(shape) FROM test"); } -} + // + // Pivot verifications + // + public void testPivotNonExactColumn() { + assertEquals("1:72: Field [text] of data type [text] cannot be used for grouping;" + + " No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", + error("SELECT * FROM (SELECT int, text, keyword FROM test) " + "PIVOT(AVG(int) FOR text IN ('bla'))")); + } + + public void testPivotColumnUsedInsteadOfAgg() { + assertEquals("1:59: No aggregate function found in PIVOT at [int]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(int FOR keyword IN ('bla'))")); + } + + public void testPivotScalarUsedInsteadOfAgg() { + assertEquals("1:59: No aggregate function found in PIVOT at [ROUND(int)]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(ROUND(int) FOR keyword IN ('bla'))")); + } + + public void testPivotScalarUsedAlongSideAgg() { + assertEquals("1:59: Non-aggregate function found in PIVOT at [AVG(int) + ROUND(int)]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) + ROUND(int) FOR keyword IN ('bla'))")); + } + + public void testPivotValueNotFoldable() { + assertEquals("1:91: Non-literal [bool] found inside PIVOT values", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ('bla', bool))")); + } + + public void testPivotWithFunctionInput() { + assertEquals("1:37: No functions allowed (yet); encountered [YEAR(date)]", + error("SELECT * FROM (SELECT int, keyword, YEAR(date) FROM test) " + "PIVOT(AVG(int) FOR keyword IN ('bla'))")); + } + + public void testPivotWithFoldableFunctionInValues() { + assertEquals("1:85: Non-literal [UCASE('bla')] found inside PIVOT values", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ( UCASE('bla') ))")); + } + + public void testPivotWithNull() { + assertEquals("1:85: Null not allowed as a PIVOT value", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ( null ))")); + } + + public void testPivotValuesHaveDifferentTypeThanColumn() { + assertEquals("1:81: Literal ['bla'] of type [keyword] does not match 
type [boolean] of PIVOT column [bool]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR bool IN ('bla'))")); + } + + public void testPivotValuesWithMultipleDifferencesThanColumn() { + assertEquals("1:81: Literal ['bla'] of type [keyword] does not match type [boolean] of PIVOT column [bool]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR bool IN ('bla', true))")); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java index 4216db7cb70..195d11be434 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java @@ -19,8 +19,8 @@ import java.util.BitSet; import java.util.List; import java.util.function.Supplier; -public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingTestCase { - public static CompositeAggregationCursor randomCompositeCursor() { +public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingTestCase { + public static CompositeAggCursor randomCompositeCursor() { int extractorsSize = between(1, 20); ZoneId id = randomSafeZone(); List extractors = new ArrayList<>(extractorsSize); @@ -28,7 +28,7 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT extractors.add(randomBucketExtractor(id)); } - return new CompositeAggregationCursor(new byte[randomInt(256)], extractors, randomBitSet(extractorsSize), + return new CompositeAggCursor(new byte[randomInt(256)], extractors, randomBitSet(extractorsSize), randomIntBetween(10, 1024), randomBoolean(), randomAlphaOfLength(5)); } @@ -41,8 +41,8 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT } @Override - protected CompositeAggregationCursor mutateInstance(CompositeAggregationCursor instance) throws IOException { - return new CompositeAggregationCursor(instance.next(), instance.extractors(), + protected CompositeAggCursor mutateInstance(CompositeAggCursor instance) throws IOException { + return new CompositeAggCursor(instance.next(), instance.extractors(), randomValueOtherThan(instance.mask(), () -> randomBitSet(instance.extractors().size())), randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 512)), !instance.includeFrozen(), @@ -50,17 +50,17 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT } @Override - protected CompositeAggregationCursor createTestInstance() { + protected CompositeAggCursor createTestInstance() { return randomCompositeCursor(); } @Override - protected Reader instanceReader() { - return CompositeAggregationCursor::new; + protected Reader instanceReader() { + return CompositeAggCursor::new; } @Override - protected ZoneId instanceZoneId(CompositeAggregationCursor instance) { + protected ZoneId instanceZoneId(CompositeAggCursor instance) { List extractors = instance.extractors(); for (BucketExtractor bucketExtractor : extractors) { ZoneId zoneId = MetricAggExtractorTests.extractZoneId(bucketExtractor); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 
2d3b6cdee52..0238cfe8591 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer.PruneSubqueryAliases; +import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.Order.OrderDirection; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.function.aggregate.First; import org.elasticsearch.xpack.sql.expression.function.aggregate.Last; @@ -87,14 +89,17 @@ import org.elasticsearch.xpack.sql.optimizer.Optimizer.PropagateEquals; import org.elasticsearch.xpack.sql.optimizer.Optimizer.PruneDuplicateFunctions; import org.elasticsearch.xpack.sql.optimizer.Optimizer.ReplaceFoldableAttributes; import org.elasticsearch.xpack.sql.optimizer.Optimizer.ReplaceMinMaxWithTopHits; +import org.elasticsearch.xpack.sql.optimizer.Optimizer.RewritePivot; import org.elasticsearch.xpack.sql.optimizer.Optimizer.SimplifyCase; import org.elasticsearch.xpack.sql.optimizer.Optimizer.SimplifyConditional; import org.elasticsearch.xpack.sql.optimizer.Optimizer.SortAggregateOnOrderBy; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; +import org.elasticsearch.xpack.sql.plan.logical.EsRelation; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.command.ShowTables; @@ -1498,4 +1503,23 @@ public class OptimizerTests extends ESTestCase { assertEquals(firstAlias, groupings.get(0)); assertEquals(secondAlias, groupings.get(1)); } -} + + public void testPivotRewrite() { + FieldAttribute column = getFieldAttribute("pivot"); + FieldAttribute number = getFieldAttribute("number"); + List values = Arrays.asList(new Alias(EMPTY, "ONE", L(1)), new Alias(EMPTY, "TWO", L(2))); + List aggs = Arrays.asList(new Avg(EMPTY, number)); + Pivot pivot = new Pivot(EMPTY, new EsRelation(EMPTY, new EsIndex("table", emptyMap()), false), column, values, aggs); + + LogicalPlan result = new RewritePivot().apply(pivot); + assertEquals(Pivot.class, result.getClass()); + Pivot pv = (Pivot) result; + assertEquals(pv.aggregates(), aggs); + assertEquals(Filter.class, pv.child().getClass()); + Filter f = (Filter) pv.child(); + assertEquals(In.class, f.condition().getClass()); + In in = (In) f.condition(); + assertEquals(column, in.value()); + assertEquals(Arrays.asList(L(1), L(2)), in.list()); + } +} \ No newline at end of file diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java new file mode 100644 index 00000000000..4e89fdb2154 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.planner; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.TestUtils; +import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; +import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; +import org.elasticsearch.xpack.sql.analysis.index.EsIndex; +import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; +import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; +import org.elasticsearch.xpack.sql.optimizer.Optimizer; +import org.elasticsearch.xpack.sql.parser.SqlParser; +import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.sql.stats.Metrics; +import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.type.TypesTests; +import org.junit.After; +import org.junit.Before; + +import java.util.Map; + +public class PostOptimizerVerifierTests extends ESTestCase { + + private SqlParser parser; + private Analyzer analyzer; + private Optimizer optimizer; + private Planner planner; + private IndexResolution indexResolution; + + @Before + public void init() { + parser = new SqlParser(); + + Map mapping = TypesTests.loadMapping("mapping-multi-field-variation.json"); + EsIndex test = new EsIndex("test", mapping); + indexResolution = IndexResolution.valid(test); + analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), indexResolution, new Verifier(new Metrics())); + optimizer = new Optimizer(); + planner = new Planner(); + } + + @After + public void destroy() { + parser = null; + analyzer = null; + } + + private PhysicalPlan plan(String sql) { + return planner.plan(optimizer.optimize(analyzer.analyze(parser.createStatement(sql), true)), true); + } + + private String error(String sql) { + return error(indexResolution, sql); + } + + private String error(IndexResolution getIndexResult, String sql) { + PlanningException e = expectThrows(PlanningException.class, () -> plan(sql)); + assertTrue(e.getMessage().startsWith("Found ")); + String header = "Found 1 problem(s)\nline "; + return e.getMessage().substring(header.length()); + } + + public void testPivotInnerAgg() { + assertEquals("1:59: Aggregation [SUM_OF_SQUARES(int)] not supported (yet) by PIVOT", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(SUM_OF_SQUARES(int) FOR keyword IN ('bla'))")); + } + + public void testPivotNestedInnerAgg() { + assertEquals("1:65: Aggregation [SUM_OF_SQUARES(int)] not supported (yet) by PIVOT", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(ROUND(SUM_OF_SQUARES(int)) FOR keyword IN ('bla'))")); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index c94da662151..11f6cc949de 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.optimizer.Optimizer; @@ -26,8 +27,10 @@ import org.elasticsearch.xpack.sql.type.TypesTests; import org.junit.AfterClass; import org.junit.BeforeClass; +import java.util.Arrays; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.startsWith; @@ -397,4 +400,18 @@ public class QueryFolderTests extends ESTestCase { AggregateFunctionAttribute afa = (AggregateFunctionAttribute) ee.output().get(0); assertThat(afa.propertyPath(), endsWith("[3.0]")); } + + public void testFoldingOfPivot() { + PhysicalPlan p = plan("SELECT * FROM (SELECT int, keyword, bool FROM test) PIVOT(AVG(int) FOR keyword IN ('A', 'B'))"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec ee = (EsQueryExec) p; + assertEquals(3, ee.output().size()); + assertEquals(Arrays.asList("bool", "'A'", "'B'"), Expressions.names(ee.output())); + String q = ee.toString().replaceAll("\\s+", ""); + assertThat(q, containsString("\"query\":{\"terms\":{\"keyword\":[\"A\",\"B\"]")); + String a = ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""); + assertThat(a, containsString("\"terms\":{\"field\":\"bool\"")); + assertThat(a, containsString("\"terms\":{\"field\":\"keyword\"")); + assertThat(a, containsString("{\"avg\":{\"field\":\"int\"}")); + } } From 08f28e642b9aa24a2c9be973782807f8ec47f4ce Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 23 Sep 2019 19:37:15 +0200 Subject: [PATCH 11/94] Replace SearchContext with QueryShardContext in query builder tests (#46978) This commit replaces the SearchContext used in AbstractQueryTestCase with a QueryShardContext in order to reduce the visibility of search contexts. 
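A representative before/after of the test hook this touches (simplified sketch; MyQueryBuilder stands in for any concrete builder under test):

    // before: tests received the full search context and unwrapped the shard context themselves
    protected void doAssertLuceneQuery(MyQueryBuilder qb, Query query, SearchContext context) throws IOException {
        QueryShardContext shardContext = context.getQueryShardContext();
        // ... assertions ran against shardContext ...
    }

    // after: the shard-level context is handed to the test directly
    protected void doAssertLuceneQuery(MyQueryBuilder qb, Query query, QueryShardContext context) throws IOException {
        // ... assertions run against context ...
    }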
Relates #46523 --- .../query/RankFeatureQueryBuilderTests.java | 3 +-- .../join/query/HasChildQueryBuilderTests.java | 19 +++++-------------- .../query/HasParentQueryBuilderTests.java | 19 +++++-------------- .../join/query/ParentIdQueryBuilderTests.java | 4 ++-- .../PercolateQueryBuilderTests.java | 3 +-- .../index/query/InnerHitContextBuilder.java | 4 ++++ ...angeFieldQueryStringQueryBuilderTests.java | 3 +-- .../index/query/BoolQueryBuilderTests.java | 4 +--- .../query/BoostingQueryBuilderTests.java | 7 +++---- .../query/CommonTermsQueryBuilderTests.java | 3 +-- .../query/ConstantScoreQueryBuilderTests.java | 5 ++--- .../index/query/DisMaxQueryBuilderTests.java | 5 ++--- .../DistanceFeatureQueryBuilderTests.java | 9 +++++---- .../index/query/ExistsQueryBuilderTests.java | 19 +++++++++---------- .../FieldMaskingSpanQueryBuilderTests.java | 7 ++++--- .../index/query/FuzzyQueryBuilderTests.java | 3 +-- .../GeoBoundingBoxQueryBuilderTests.java | 4 +--- .../query/GeoDistanceQueryBuilderTests.java | 3 +-- .../query/GeoPolygonQueryBuilderTests.java | 3 +-- .../query/GeoShapeQueryBuilderTests.java | 3 +-- .../index/query/IdsQueryBuilderTests.java | 7 +++---- .../query/IntervalQueryBuilderTests.java | 3 +-- .../query/MatchAllQueryBuilderTests.java | 3 +-- .../MatchBoolPrefixQueryBuilderTests.java | 5 +++-- .../query/MatchNoneQueryBuilderTests.java | 3 +-- .../MatchPhrasePrefixQueryBuilderTests.java | 3 +-- .../query/MatchPhraseQueryBuilderTests.java | 4 +--- .../index/query/MatchQueryBuilderTests.java | 4 +--- .../query/MoreLikeThisQueryBuilderTests.java | 3 +-- .../query/MultiMatchQueryBuilderTests.java | 3 +-- .../index/query/NestedQueryBuilderTests.java | 19 ++++++------------- .../index/query/PrefixQueryBuilderTests.java | 3 +-- .../query/QueryStringQueryBuilderTests.java | 3 +-- .../index/query/RangeQueryBuilderTests.java | 17 ++++++++--------- .../index/query/RegexpQueryBuilderTests.java | 3 +-- .../index/query/ScriptQueryBuilderTests.java | 3 +-- .../query/ScriptScoreQueryBuilderTests.java | 3 +-- .../query/SimpleQueryStringBuilderTests.java | 3 +-- .../SpanContainingQueryBuilderTests.java | 3 +-- .../query/SpanFirstQueryBuilderTests.java | 3 +-- .../index/query/SpanGapQueryBuilderTests.java | 7 +++---- .../query/SpanMultiTermQueryBuilderTests.java | 5 ++--- .../query/SpanNearQueryBuilderTests.java | 7 +++---- .../index/query/SpanNotQueryBuilderTests.java | 7 +++---- .../index/query/SpanOrQueryBuilderTests.java | 5 ++--- .../query/SpanTermQueryBuilderTests.java | 5 ++--- .../query/SpanWithinQueryBuilderTests.java | 3 +-- .../index/query/TermQueryBuilderTests.java | 5 ++--- .../index/query/TermsQueryBuilderTests.java | 3 +-- .../query/TermsSetQueryBuilderTests.java | 3 +-- .../index/query/TypeQueryBuilderTests.java | 3 +-- .../query/WildcardQueryBuilderTests.java | 3 +-- .../index/query/WrapperQueryBuilderTests.java | 5 ++--- .../FunctionScoreQueryBuilderTests.java | 3 +-- .../test/AbstractBuilderTestCase.java | 18 ------------------ .../test/AbstractQueryTestCase.java | 16 +++++++--------- .../PinnedQueryBuilderTests.java | 4 ++-- .../index/query/ShapeQueryBuilderTests.java | 4 ++-- 58 files changed, 123 insertions(+), 211 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java index aea37e2a8ee..0cd048184b9 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java 
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.MapperExtrasPlugin; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.RankFeatureQueryBuilder.ScoreFunction; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -91,7 +90,7 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase expectedClass = FeatureField.newSaturationQuery("", "", 1, 1).getClass(); assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass))); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 2e682eda733..f84fbb214df 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -52,8 +52,6 @@ import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.fetch.subphase.InnerHitsContext; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.AbstractQueryTestCase; @@ -171,7 +169,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - final InnerHitsContext innerHitsContext = new InnerHitsContext(); - for (InnerHitContextBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, innerHitsContext); - } - assertEquals(1, innerHitsContext.getInnerHits().size()); - assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); + assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName())); + InnerHitContextBuilder innerHits = innerHitBuilders.get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index 73d29314130..1d26467853c 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -40,8 +40,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.fetch.subphase.InnerHitsContext; 
-import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.AbstractQueryTestCase; @@ -138,7 +136,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - final InnerHitsContext innerHitsContext = new InnerHitsContext(); - for (InnerHitContextBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, innerHitsContext); - } - assertEquals(1, innerHitsContext.getInnerHits().size()); - assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); + assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName())); + InnerHitContextBuilder innerHits = innerHitBuilders.get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index 83441ef92d2..f43214515be 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -32,10 +32,10 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; @@ -111,7 +111,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase clauses = new ArrayList<>(); clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context)); clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java index 0e0f767d5a5..534126ee5f3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -40,9 +39,9 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context.getQueryShardContext()); + 
protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Collection queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker())); diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index c2fcfdd7140..c1622057b6b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; import org.elasticsearch.index.query.DistanceFeatureQueryBuilder.Origin; @@ -74,7 +73,9 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern); - Collection mappedFields = fields.stream().filter((field) -> context.getQueryShardContext().getObjectMapper(field) != null - || context.getQueryShardContext().getMapperService().fullName(field) != null).collect(Collectors.toList()); - if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { + Collection fields = context.simpleMatchToIndexNames(fieldPattern); + Collection mappedFields = fields.stream().filter((field) -> context.getObjectMapper(field) != null + || context.getMapperService().fullName(field) != null).collect(Collectors.toList()); + if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { if (fields.size() == 1) { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; @@ -93,21 +92,21 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase childFields = new ArrayList<>(); - context.getQueryShardContext().getObjectMapper(field).forEach(mapper -> childFields.add(mapper.name())); + context.getObjectMapper(field).forEach(mapper -> childFields.add(mapper.name())); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (int i = 0; i < childFields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); } - } else if (context.getQueryShardContext().getMapperService().fullName(field).hasDocValues()) { + } else if (context.getMapperService().fullName(field).hasDocValues()) { assertThat(constantScoreQuery.getQuery(), instanceOf(DocValuesFieldExistsQuery.class)); DocValuesFieldExistsQuery dvExistsQuery = (DocValuesFieldExistsQuery) constantScoreQuery.getQuery(); assertEquals(field, dvExistsQuery.getField()); - } else if (context.getQueryShardContext().getMapperService().fullName(field).omitNorms() == false) { + } else if (context.getMapperService().fullName(field).omitNorms() == false) { assertThat(constantScoreQuery.getQuery(), instanceOf(NormsFieldExistsQuery.class)); 
NormsFieldExistsQuery normsExistsQuery = (NormsFieldExistsQuery) constantScoreQuery.getQuery(); assertEquals(field, normsExistsQuery.getField()); diff --git a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java index 9d98a12358f..f564972b2a4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -43,12 +42,14 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase } @Override - protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { + protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { boolean allTypes = queryBuilder.types().length == 0 || queryBuilder.types().length == 1 && "_all".equals(queryBuilder.types()[0]); if (queryBuilder.ids().size() == 0 // no types - || context.getQueryShardContext().fieldMapper(IdFieldMapper.NAME) == null + || context.fieldMapper(IdFieldMapper.NAME) == null // there are types, but disjoint from the query || (allTypes == false && - Arrays.asList(queryBuilder.types()).indexOf(context.mapperService().documentMapper().type()) == -1)) { + Arrays.asList(queryBuilder.types()).indexOf(context.getMapperService().documentMapper().type()) == -1)) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); } else { assertThat(query, instanceOf(TermInSetQuery.class)); diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 15f9b52d23b..379719f3616 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -134,7 +133,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase 0) { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 13309fa6edf..68506b443c0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type; import org.elasticsearch.index.search.MatchQuery; -import 
org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -156,7 +155,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase innerHitInternals = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitInternals); - InnerHitsContext innerHitsContext = new InnerHitsContext(); - for (InnerHitContextBuilder builder : innerHitInternals.values()) { - builder.build(searchContext, innerHitsContext); - } - assertEquals(1, innerHitsContext.getInnerHits().size()); - assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), INT_FIELD_NAME); + assertTrue(innerHitInternals.containsKey(queryBuilder.innerHit().getName())); + InnerHitContextBuilder innerHits = innerHitInternals.get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index 2f868d02921..ee56a67092d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -68,7 +67,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanNearQuery.getClauses()) { - assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context.getQueryShardContext()))); + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); - assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context.getQueryShardContext()))); + assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 7df58553e27..ed6deb68448 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -56,11 +55,11 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = 
queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanOrQuery.getClauses()) { - assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context.getQueryShardContext()))); + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java index a5ef596e025..27f20f2295a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.spans.SpanTermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -59,14 +58,14 @@ public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase> IFD getForField(MappedFieldType fieldType) { - return serviceHolder.indexFieldDataService.getForField(fieldType); // need to build / parse inner hits sort fields - } - - }; - return testSearchContext; - } - @After public void afterTest() { serviceHolder.clientInvocationHandler.delegate = null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 089423770d5..c405ed619c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -54,7 +54,6 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.support.QueryParsers; -import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -422,14 +421,13 @@ public abstract class AbstractQueryTestCase> context.setAllowUnmappedFields(true); QB firstQuery = createTestQueryBuilder(); QB controlQuery = copyQuery(firstQuery); - SearchContext searchContext = getSearchContext(context); /* we use a private rewrite context here since we want the most realistic way of asserting that we are cacheable or not. * We do it this way in SearchService where * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ QueryBuilder rewritten = rewriteQuery(firstQuery, new QueryShardContext(context)); Query firstLuceneQuery = rewritten.toQuery(context); assertNotNull("toQuery should not return null", firstLuceneQuery); - assertLuceneQuery(firstQuery, firstLuceneQuery, searchContext); + assertLuceneQuery(firstQuery, firstLuceneQuery, context); //remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well assertTrue( "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, @@ -445,10 +443,10 @@ public abstract class AbstractQueryTestCase> secondQuery.queryName(secondQuery.queryName() == null ? 
randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10)); } - searchContext = getSearchContext(context); + context = new QueryShardContext(context); Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotNull("toQuery should not return null", secondLuceneQuery); - assertLuceneQuery(secondQuery, secondLuceneQuery, searchContext); + assertLuceneQuery(secondQuery, secondLuceneQuery, context); if (builderGeneratesCacheableQueries()) { assertEquals("two equivalent query builders lead to different lucene queries", @@ -494,11 +492,11 @@ public abstract class AbstractQueryTestCase> /** * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} * and {@link QueryShardContext}. Verifies that named queries and boost are properly handled and delegates to - * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, SearchContext)} for query specific checks. + * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, QueryShardContext)} for query specific checks. */ - private void assertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException { + private void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException { if (queryBuilder.queryName() != null) { - Query namedQuery = context.getQueryShardContext().copyNamedQueries().get(queryBuilder.queryName()); + Query namedQuery = context.copyNamedQueries().get(queryBuilder.queryName()); assertThat(namedQuery, equalTo(query)); } if (query != null) { @@ -522,7 +520,7 @@ public abstract class AbstractQueryTestCase> * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} * and {@link QueryShardContext}. Contains the query specific checks to be implemented by subclasses. */ - protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException; + protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException; protected void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { if (fieldBoost != AbstractQueryBuilder.DEFAULT_BOOST) { diff --git a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java index 57db7972655..db3d46fc1a7 100644 --- a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java +++ b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java @@ -18,9 +18,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -89,7 +89,7 @@ public class PinnedQueryBuilderTests extends AbstractQueryTestCase Date: Mon, 23 Sep 2019 13:21:37 -0700 Subject: [PATCH 12/94] Add support for aliases in queries on _index. 
(#46944) Previously, queries on the _index field were not able to specify index aliases. This was a regression in functionality compared to the 'indices' query that was deprecated and removed in 6.0. Now queries on _index can specify an alias, which is resolved to the concrete index names when we check whether an index matches. To match a remote shard target, the pattern needs to be of the form 'cluster:index' to match the fully-qualified index name. Index aliases can be specified in the following query types: term, terms, prefix, and wildcard. --- docs/reference/migration/migrate_7_5.asciidoc | 30 +++++++ .../multi_cluster/90_index_name_query.yml | 58 ++++++++++++ .../org/elasticsearch/index/IndexModule.java | 4 +- .../org/elasticsearch/index/IndexService.java | 9 +- .../index/mapper/IndexFieldMapper.java | 42 ++++----- .../index/query/QueryShardContext.java | 73 +++++++++------ .../index/query/SearchIndexNameMatcher.java | 84 +++++++++++++++++ .../elasticsearch/indices/IndicesService.java | 6 +- .../java/org/elasticsearch/node/Node.java | 8 +- .../elasticsearch/index/IndexModuleTests.java | 2 +- .../index/mapper/DateFieldTypeTests.java | 4 +- .../mapper/FieldNamesFieldTypeTests.java | 2 +- .../index/mapper/IndexFieldTypeTests.java | 24 +++-- .../index/mapper/RangeFieldTypeTests.java | 2 +- .../query/IntervalQueryBuilderTests.java | 2 +- .../index/query/QueryShardContextTests.java | 22 +---- .../index/query/RangeQueryRewriteTests.java | 9 +- .../query/SearchIndexNameMatcherTests.java | 90 +++++++++++++++++++ .../query/SimpleQueryStringBuilderTests.java | 10 --- .../query/WildcardQueryBuilderTests.java | 15 ---- .../bucket/histogram/ExtendedBoundsTests.java | 2 +- .../ScriptedMetricAggregatorTests.java | 2 +- .../highlight/HighlightBuilderTests.java | 2 +- .../rescore/QueryRescorerBuilderTests.java | 4 +- .../search/sort/AbstractSortTestCase.java | 2 +- .../AbstractSuggestionBuilderTestCase.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 1 + .../aggregations/AggregatorTestCase.java | 8 +- .../test/AbstractBuilderTestCase.java | 2 +- .../search/MockSearchServiceTests.java | 2 +- .../DocumentSubsetBitsetCacheTests.java | 2 +- ...ityIndexReaderWrapperIntegrationTests.java | 4 +- .../job/RollupIndexerIndexingTests.java | 2 +- 33 files changed, 392 insertions(+), 139 deletions(-) create mode 100644 docs/reference/migration/migrate_7_5.asciidoc create mode 100644 qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml create mode 100644 server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java create mode 100644 server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java diff --git a/docs/reference/migration/migrate_7_5.asciidoc b/docs/reference/migration/migrate_7_5.asciidoc new file mode 100644 index 00000000000..2334ce8aa5a --- /dev/null +++ b/docs/reference/migration/migrate_7_5.asciidoc @@ -0,0 +1,30 @@ +[[breaking-changes-7.5]] +== Breaking changes in 7.5 +++++ +7.5 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to Elasticsearch 7.5. + +See also <> and <>. 
+ +coming[7.5.0] + +//NOTE: The notable-breaking-changes tagged regions are re-used in the +//Installation and Upgrade Guide + +//tag::notable-breaking-changes[] + +//end::notable-breaking-changes[] + +[discrete] +[[breaking_75_search_changes]] +=== Search Changes + +[discrete] +==== Stricter checking for wildcard queries on _index +Previously, a wildcard query on the `_index` field matched directly against the +fully-qualified index name. Now, in order to match against remote indices like +`cluster:index`, the query must contain a colon, as in `cl*ster:inde*`. This +behavior aligns with the way indices are matched in the search endpoint. diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml new file mode 100644 index 00000000000..030dad662df --- /dev/null +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/90_index_name_query.yml @@ -0,0 +1,58 @@ +--- +setup: + - do: + indices.create: + index: single_doc_index + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 +--- +teardown: + - do: + indices.delete: + index: single_doc_index + ignore_unavailable: true + +--- +"Test that queries on _index match against the correct indices.": + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "single_doc_index"}}' + - '{"f1": "local_cluster", "sort_field": 0}' + + - do: + search: + rest_total_hits_as_int: true + index: "single_doc_index,my_remote_cluster:single_doc_index" + body: + query: + term: + "_index": "single_doc_index" + + - match: { hits.total: 1 } + - match: { hits.hits.0._index: "single_doc_index"} + - match: { _shards.total: 2 } + - match: { _shards.successful: 2 } + - match: { _shards.skipped : 0} + - match: { _shards.failed: 0 } + + - do: + search: + rest_total_hits_as_int: true + index: "single_doc_index,my_remote_cluster:single_doc_index" + body: + query: + term: + "_index": "my_remote_cluster:single_doc_index" + + - match: { hits.total: 1 } + - match: { hits.hits.0._index: "my_remote_cluster:single_doc_index"} + - match: { _shards.total: 2 } + - match: { _shards.successful: 2 } + - match: { _shards.skipped : 0} + - match: { _shards.failed: 0 } diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 6ef335144eb..b10d84ef1c6 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -30,6 +30,7 @@ import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -386,6 +387,7 @@ public final class IndexModule { BigArrays bigArrays, ThreadPool threadPool, ScriptService scriptService, + ClusterService clusterService, Client client, IndicesQueryCache indicesQueryCache, MapperRegistry mapperRegistry, @@ -411,7 +413,7 @@ public final class IndexModule { return new IndexService(indexSettings, indexCreationContext, environment, xContentRegistry, new SimilarityService(indexSettings, scriptService, similarities), shardStoreDeleter, analysisRegistry, engineFactory, circuitBreakerService, 
bigArrays, threadPool, scriptService, - client, queryCache, directoryFactory, eventListener, readerWrapperFactory, mapperRegistry, + clusterService, client, queryCache, directoryFactory, eventListener, readerWrapperFactory, mapperRegistry, indicesFieldDataCache, searchOperationListeners, indexOperationListeners, namedWriteableRegistry); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index da470a04afa..5e2ac0dbac6 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -32,6 +32,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -57,6 +58,7 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.SearchIndexNameMatcher; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.IndexEventListener; @@ -134,6 +136,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final ThreadPool threadPool; private final BigArrays bigArrays; private final ScriptService scriptService; + private final ClusterService clusterService; private final Client client; private final CircuitBreakerService circuitBreakerService; private Supplier indexSortSupplier; @@ -151,6 +154,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust BigArrays bigArrays, ThreadPool threadPool, ScriptService scriptService, + ClusterService clusterService, Client client, QueryCache queryCache, IndexStorePlugin.DirectoryFactory directoryFactory, @@ -201,6 +205,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust this.bigArrays = bigArrays; this.threadPool = threadPool; this.scriptService = scriptService; + this.clusterService = clusterService; this.client = client; this.eventListener = eventListener; this.nodeEnv = nodeEnv; @@ -530,9 +535,11 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. 
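* The returned context is also constructed with a {@link SearchIndexNameMatcher} derived from the cluster state, so that queries on the {@code _index} field can match index aliases and fully-qualified remote index names.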
*/ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias) { + SearchIndexNameMatcher indexNameMatcher = new SearchIndexNameMatcher(index().getName(), clusterAlias, clusterService); return new QueryShardContext( shardId, indexSettings, bigArrays, indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(), - similarityService(), scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, clusterAlias); + similarityService(), scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, clusterAlias, + indexNameMatcher); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 276a8e7583c..4e690640135 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -129,11 +129,16 @@ public class IndexFieldMapper extends MetadataFieldMapper { */ @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { + String pattern = value instanceof BytesRef + ? ((BytesRef) value).utf8ToString() + : value.toString(); + if (context.indexMatches(pattern)) { + // No need to OR these clauses - we can only logically be + // running in the context of just one of these index names. return Queries.newMatchAllQuery(); } else { - return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() - + " vs. " + value); + return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + + "] doesn't match the provided value [" + value + "]."); } } @@ -143,26 +148,29 @@ public class IndexFieldMapper extends MetadataFieldMapper { return super.termsQuery(values, context); } for (Object value : values) { - if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { + String pattern = value instanceof BytesRef + ? ((BytesRef) value).utf8ToString() + : value.toString(); + if (context.indexMatches(pattern)) { // No need to OR these clauses - we can only logically be // running in the context of just one of these index names. return Queries.newMatchAllQuery(); } } // None of the listed index names are this one - return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndex().getName() - + " vs. 
" + values); + return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + + "] doesn't match the provided values [" + values + "]."); } @Override public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) { - String indexName = context.getFullyQualifiedIndex().getName(); - if (indexName.startsWith(value)) { + String pattern = value + "*"; + if (context.indexMatches(pattern)) { return Queries.newMatchAllQuery(); } else { - return Queries.newMatchNoDocsQuery("The index [" + indexName + + return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + "] doesn't match the provided prefix [" + value + "]."); } } @@ -176,8 +184,8 @@ public class IndexFieldMapper extends MetadataFieldMapper { if (pattern.matcher(indexName).matches()) { return Queries.newMatchAllQuery(); } else { - return Queries.newMatchNoDocsQuery("The index [" + indexName + - "] doesn't match the provided pattern [" + value + "]."); + return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + + "] doesn't match the provided pattern [" + value + "]."); } } @@ -185,20 +193,14 @@ public class IndexFieldMapper extends MetadataFieldMapper { public Query wildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) { - String indexName = context.getFullyQualifiedIndex().getName(); - if (isSameIndex(value, indexName)) { + if (context.indexMatches(value)) { return Queries.newMatchAllQuery(); } else { - return Queries.newMatchNoDocsQuery("The index [" + indexName + - "] doesn't match the provided pattern [" + value + "]."); + return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + + "] doesn't match the provided pattern [" + value + "]."); } } - private boolean isSameIndex(Object value, String indexName) { - String pattern = value instanceof BytesRef ? ((BytesRef) value).utf8ToString() : value.toString(); - return Regex.simpleMatch(pattern, indexName); - } - @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { return new ConstantIndexFieldData.Builder(mapperService -> fullyQualifiedIndexName); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index a631ea319b4..b6eea750748 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -69,6 +69,7 @@ import java.util.Set; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.LongSupplier; +import java.util.function.Predicate; import static java.util.Collections.unmodifiableMap; @@ -93,7 +94,9 @@ public class QueryShardContext extends QueryRewriteContext { private String[] types = Strings.EMPTY_ARRAY; private boolean cacheable = true; private final SetOnce frozen = new SetOnce<>(); + private final Index fullyQualifiedIndex; + private final Predicate indexNameMatcher; public void setTypes(String... 
types) { this.types = types; @@ -109,45 +112,48 @@ public class QueryShardContext extends QueryRewriteContext { private NestedScope nestedScope; public QueryShardContext(int shardId, - IndexSettings indexSettings, - BigArrays bigArrays, - BitsetFilterCache bitsetFilterCache, - BiFunction> indexFieldDataLookup, - MapperService mapperService, - SimilarityService similarityService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - NamedWriteableRegistry namedWriteableRegistry, - Client client, - IndexSearcher searcher, - LongSupplier nowInMillis, - String clusterAlias) { + IndexSettings indexSettings, + BigArrays bigArrays, + BitsetFilterCache bitsetFilterCache, + BiFunction> indexFieldDataLookup, + MapperService mapperService, + SimilarityService similarityService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, + Client client, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + Predicate indexNameMatcher) { this(shardId, indexSettings, bigArrays, bitsetFilterCache, indexFieldDataLookup, mapperService, similarityService, - scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, + scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, indexNameMatcher, new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()), indexSettings.getIndex().getUUID())); } public QueryShardContext(QueryShardContext source) { this(source.shardId, source.indexSettings, source.bigArrays, source.bitsetFilterCache, source.indexFieldDataService, - source.mapperService, source.similarityService, source.scriptService, source.getXContentRegistry(), - source.getWriteableRegistry(), source.client, source.searcher, source.nowInMillis, source.fullyQualifiedIndex); + source.mapperService, source.similarityService, source.scriptService, source.getXContentRegistry(), + source.getWriteableRegistry(), source.client, source.searcher, source.nowInMillis, source.indexNameMatcher, + source.fullyQualifiedIndex); } private QueryShardContext(int shardId, - IndexSettings indexSettings, - BigArrays bigArrays, - BitsetFilterCache bitsetFilterCache, - BiFunction> indexFieldDataLookup, - MapperService mapperService, - SimilarityService similarityService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - NamedWriteableRegistry namedWriteableRegistry, - Client client, - IndexSearcher searcher, - LongSupplier nowInMillis, - Index fullyQualifiedIndex) { + IndexSettings indexSettings, + BigArrays bigArrays, + BitsetFilterCache bitsetFilterCache, + BiFunction> indexFieldDataLookup, + MapperService mapperService, + SimilarityService similarityService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, + Client client, + IndexSearcher searcher, + LongSupplier nowInMillis, + Predicate indexNameMatcher, + Index fullyQualifiedIndex) { super(xContentRegistry, namedWriteableRegistry, client, nowInMillis); this.shardId = shardId; this.similarityService = similarityService; @@ -160,6 +166,7 @@ public class QueryShardContext extends QueryRewriteContext { this.scriptService = scriptService; this.indexSettings = indexSettings; this.searcher = searcher; + this.indexNameMatcher = indexNameMatcher; this.fullyQualifiedIndex = fullyQualifiedIndex; } @@ -311,6 +318,14 @@ public class QueryShardContext extends QueryRewriteContext { return 
indexSettings.getIndexVersionCreated(); } + /** + * Given an index pattern, checks whether it matches against the current shard. The pattern + * may represent a fully qualified index name if the search targets remote shards. + */ + public boolean indexMatches(String pattern) { + return indexNameMatcher.test(pattern); + } + public ParsedQuery toQuery(QueryBuilder queryBuilder) { return toQuery(queryBuilder, q -> { Query query = q.toQuery(this); diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java b/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java new file mode 100644 index 00000000000..b2329d1d54c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/SearchIndexNameMatcher.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.transport.RemoteClusterAware; + +import java.util.function.Predicate; + +/** + * A predicate that checks whether an index pattern matches the current search shard target. + */ +public class SearchIndexNameMatcher implements Predicate<String> { + private final String indexName; + private final String clusterAlias; + private final ClusterService clusterService; + private final IndexNameExpressionResolver expressionResolver; + + /** + * Creates a new index name matcher. + * + * @param indexName the name of the local index. + * @param clusterAlias the cluster alias of this search shard target. If it is a local target, the alias + * should be null or equal to {@link RemoteClusterAware#LOCAL_CLUSTER_GROUP_KEY}. + * @param clusterService the cluster service. + */ + public SearchIndexNameMatcher(String indexName, + String clusterAlias, + ClusterService clusterService) { + this.indexName = indexName; + this.clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias) ? null : clusterAlias; + this.clusterService = clusterService; + this.expressionResolver = new IndexNameExpressionResolver(); + } + + /** + * Given an index pattern, checks whether it matches against the current shard. + * + * If this shard represents a remote shard target, then in order to match, the pattern must contain + * the separator ':', and must match on both the cluster alias and index name. 
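+ * + * For example, a shard of index {@code index1} searched through the cluster alias {@code cluster} + * matches the patterns {@code cluster:index1} and {@code cluster:ind*x1}, but not the plain pattern {@code index1}.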
+ */ + public boolean test(String pattern) { + int separatorIndex = pattern.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (separatorIndex < 0) { + return clusterAlias == null && matchesIndex(pattern); + } else { + String clusterPattern = pattern.substring(0, separatorIndex); + String indexPattern = pattern.substring(separatorIndex + 1); + + return Regex.simpleMatch(clusterPattern, clusterAlias) && matchesIndex(indexPattern); + } + } + + private boolean matchesIndex(String pattern) { + String[] concreteIndices = expressionResolver.concreteIndexNames( + clusterService.state(), IndicesOptions.lenientExpandOpen(), pattern); + for (String index : concreteIndices) { + if (Regex.simpleMatch(index, indexName)) { + return true; + } + } + return false; + } +} diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 38ab7149521..b6c87e576bd 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -43,6 +43,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -186,6 +187,7 @@ public class IndicesService extends AbstractLifecycleComponent private final CircuitBreakerService circuitBreakerService; private final BigArrays bigArrays; private final ScriptService scriptService; + private final ClusterService clusterService; private final Client client; private volatile Map indices = emptyMap(); private final Map> pendingDeletes = new HashMap<>(); @@ -213,7 +215,7 @@ public class IndicesService extends AbstractLifecycleComponent AnalysisRegistry analysisRegistry, IndexNameExpressionResolver indexNameExpressionResolver, MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService, BigArrays bigArrays, - ScriptService scriptService, Client client, MetaStateService metaStateService, + ScriptService scriptService, ClusterService clusterService, Client client, MetaStateService metaStateService, Collection>> engineFactoryProviders, Map directoryFactories) { this.settings = settings; @@ -235,6 +237,7 @@ public class IndicesService extends AbstractLifecycleComponent this.circuitBreakerService = circuitBreakerService; this.bigArrays = bigArrays; this.scriptService = scriptService; + this.clusterService = clusterService; this.client = client; this.indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() { @Override @@ -556,6 +559,7 @@ public class IndicesService extends AbstractLifecycleComponent bigArrays, threadPool, scriptService, + clusterService, client, indicesQueryCache, mapperRegistry, diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 86a55ceb47a..efa7ddcd657 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -427,10 +427,10 @@ public class Node implements Closeable { 
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); final IndicesService indicesService = - new IndicesService(settings, pluginsService, nodeEnvironment, xContentRegistry, analysisModule.getAnalysisRegistry(), - clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, - threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, - scriptModule.getScriptService(), client, metaStateService, engineFactoryProviders, indexStoreFactories); + new IndicesService(settings, pluginsService, nodeEnvironment, xContentRegistry, analysisModule.getAnalysisRegistry(), + clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, + threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, scriptModule.getScriptService(), + clusterService, client, metaStateService, engineFactoryProviders, indexStoreFactories); final AliasValidator aliasValidator = new AliasValidator(); diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index d052fa365be..7c8d7b902fb 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -150,7 +150,7 @@ public class IndexModuleTests extends ESTestCase { private IndexService newIndexService(IndexModule module) throws IOException { return module.newIndexService(CREATE_INDEX, nodeEnvironment, xContentRegistry(), deleter, circuitBreakerService, bigArrays, - threadPool, scriptService, null, indicesQueryCache, mapperRegistry, + threadPool, scriptService, clusterService, null, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener), writableRegistry()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 479f4d7fc55..6ac59169ad9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -179,7 +179,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null); + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date = "2015-10-12T14:10:55"; @@ -202,7 +202,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(), - null, null, () -> nowInMillis, null); + null, null, () -> nowInMillis, null, null); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date1 = "2015-10-12T14:10:55"; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 9bbeecdfc8f..1a9460115f0 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -68,7 +68,7 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase { QueryShardContext queryShardContext = new QueryShardContext(0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, - null, null, null, null, null, null, () -> 0L, null); + null, null, null, null, null, null, () -> 0L, null, null); fieldNamesFieldType.setEnabled(true); Query termQuery = fieldNamesFieldType.termQuery("field_name", queryShardContext); assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index 82f0edf24f4..11b365ff16e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -21,11 +21,14 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.elasticsearch.index.Index; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryShardContext; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import java.util.function.Predicate; public class IndexFieldTypeTests extends FieldTypeTestCase { @@ -62,12 +65,15 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { } private QueryShardContext createContext() { - QueryShardContext context = mock(QueryShardContext.class); + IndexMetaData indexMetaData = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY); - Index index = new Index("index", "123"); - when(context.getFullyQualifiedIndex()).thenReturn(index); - when(context.index()).thenReturn(index); - - return context; + Predicate indexNameMatcher = pattern -> Regex.simpleMatch(pattern, "index"); + return new QueryShardContext(0, indexSettings, null, null, null, null, null, null, xContentRegistry(), writableRegistry(), + null, null, System::currentTimeMillis, null, indexNameMatcher); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 16fe2ceee8f..79ab18afbd5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -229,7 +229,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> 
nowInMillis, null); + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null); } public void testDateRangeQueryUsingMappingFormat() { diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 379719f3616..4f2d9d217f9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -371,7 +371,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null), mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), - null, null, () -> nowInMillis, clusterAlias); + null, null, () -> nowInMillis, clusterAlias, null); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index f4d7c90488f..83ab9c8e62b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -41,7 +41,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null); + null, new IndexSearcher(reader), null, null, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); } @@ -57,9 +57,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { .endObject().endObject()); indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, - null, null, indexService.mapperService(), null, null, - xContentRegistry(), writableRegistry(), null, null, null, null); + QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, null, + indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), null, null, null, null, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must return INTERSECT assertEquals(Relation.INTERSECTS, range.getRelation(context)); @@ -79,7 +78,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(), - null, new IndexSearcher(reader), null, null); + null, new IndexSearcher(reader), null, null, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT assertEquals(Relation.DISJOINT, range.getRelation(context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java new 
file mode 100644 index 00000000000..a796586bcf5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SearchIndexNameMatcherTests extends ESTestCase { + private SearchIndexNameMatcher matcher; + private SearchIndexNameMatcher remoteMatcher; + + @Before + public void setUpMatchers() { + MetaData.Builder metaDataBuilder = MetaData.builder() + .put(indexBuilder("index1").putAlias(AliasMetaData.builder("alias"))) + .put(indexBuilder("index2").putAlias(AliasMetaData.builder("alias"))) + .put(indexBuilder("index3")); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(metaDataBuilder).build(); + + ClusterService clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(state); + + matcher = new SearchIndexNameMatcher("index1", "", clusterService); + remoteMatcher = new SearchIndexNameMatcher("index1", "cluster", clusterService); + } + + private static IndexMetaData.Builder indexBuilder(String index) { + Settings.Builder settings = settings(Version.CURRENT). 
+ put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0); + return IndexMetaData.builder(index).settings(settings); + } + + public void testLocalIndex() { + assertTrue(matcher.test("index1")); + assertTrue(matcher.test("ind*x1")); + assertFalse(matcher.test("index2")); + + assertTrue(matcher.test("alias")); + assertTrue(matcher.test("*lias")); + + assertFalse(matcher.test("cluster:index1")); + } + + public void testRemoteIndex() { + assertTrue(remoteMatcher.test("cluster:index1")); + assertTrue(remoteMatcher.test("cluster:ind*x1")); + assertTrue(remoteMatcher.test("*luster:ind*x1")); + assertFalse(remoteMatcher.test("cluster:index2")); + + assertTrue(remoteMatcher.test("cluster:alias")); + assertTrue(remoteMatcher.test("cluster:*lias")); + + assertFalse(remoteMatcher.test("index1")); + assertFalse(remoteMatcher.test("alias")); + + assertFalse(remoteMatcher.test("*index1")); + assertFalse(remoteMatcher.test("*alias")); + assertFalse(remoteMatcher.test("cluster*")); + assertFalse(remoteMatcher.test("cluster*index1")); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 78a7ca35eae..bad1a6c7045 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; @@ -416,15 +415,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase now, null); + null, null, () -> now, null, null); DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime"); DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index 7203b5dd443..9d0d1d69f02 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -426,6 +426,6 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); return new QueryShardContext(0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, scriptService, - xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null); + xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null, null); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 65d2e92555c..a1f669558a5 100644 --- 
a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -280,7 +280,7 @@ public class HighlightBuilderTests extends ESTestCase { // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry, - null, null, System::currentTimeMillis, null) { + null, null, System::currentTimeMillis, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 995cfa3b1c9..accf23a9644 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -144,7 +144,7 @@ public class QueryRescorerBuilderTests extends ESTestCase { // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null) { + xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); @@ -188,7 +188,7 @@ public class QueryRescorerBuilderTests extends ESTestCase { // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null) { + xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index a09cb4b0dfa..28ca23df124 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -192,7 +192,7 @@ public abstract class AbstractSortTestCase> extends EST return builder.build(idxSettings, fieldType, new IndexFieldDataCache.None(), null, null); }; return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup, - null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null, () -> randomNonNegativeLong(), null) { + null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null, () -> randomNonNegativeLong(), null, null) { @Override public MappedFieldType fieldMapper(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java 
b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index d0289c7fa97..f60c3f07740 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -181,7 +181,7 @@ public abstract class AbstractSuggestionBuilderTestCase nowInMillis, null); + namedWriteableRegistry, this.client, searcher, () -> nowInMillis, null, null); } ScriptModule createScriptModule(List scriptPlugins) { diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index 668495f6f70..8a8842487f1 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -43,7 +43,7 @@ public class MockSearchServiceTests extends ESTestCase { final long nowInMillis = randomNonNegativeLong(); SearchContext s = new TestSearchContext(new QueryShardContext(0, new IndexSettings(EMPTY_INDEX_METADATA, Settings.EMPTY), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null)) { + xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null)) { @Override public SearchShardTarget shardTarget() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index f78b9c2aa6f..a50c39d4e6a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -241,7 +241,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase { final QueryShardContext context = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - client, new IndexSearcher(directoryReader), () -> nowInMillis, null); + client, new IndexSearcher(directoryReader), () -> nowInMillis, null, null); body.accept(context, leaf); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 8214d327491..ca49e4ae4a3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -85,7 +85,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - client, null, () -> nowInMillis, null); + client, 
null, () -> nowInMillis, null, null); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY); XPackLicenseState licenseState = mock(XPackLicenseState.class); @@ -200,7 +200,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final long nowInMillis = randomNonNegativeLong(); QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), - client, null, () -> nowInMillis, null); + client, null, () -> nowInMillis, null, null); QueryShardContext queryShardContext = spy(realQueryShardContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 7f5a8232a6d..492d24b88f0 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -93,7 +93,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { settings = createIndexSettings(); queryShardContext = new QueryShardContext(0, settings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - null, null, null, null, () -> 0L, null); + null, null, null, null, () -> 0L, null, null); } public void testSimpleDateHisto() throws Exception { From 5ca37db60cc98916b07b0bef98b3c5a8c5420cc7 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 23 Sep 2019 17:06:27 -0600 Subject: [PATCH 13/94] Mute SLMSnapshotBlockingIntegTests.testRetentionWhileSnapshotInProgress Relates to #46508 --- .../elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index b42a1f98074..05ef3fa792f 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -143,6 +143,7 @@ public class SLMSnapshotBlockingIntegTests extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/46508") public void testRetentionWhileSnapshotInProgress() throws Exception { final String indexName = "test"; final String policyId = "slm-policy"; From 6986d7f9680cd20de60ae615c77281a9f955611f Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 24 Sep 2019 08:57:39 +0200 Subject: [PATCH 14/94] Add blob container retries tests for Google Cloud Storage (#46968) Similarly to what has been done for S3 in #45383, this commit adds unit tests that verify the behavior of the SDK client and blob container implementation for Google Storage when the remote service returns errors. The main purpose was to add an extra test to the specific retry logic for 410-Gone errors added in #45963. 
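All of the new tests follow the same basic pattern: a handler registered on a mock HTTP server fails a bounded number of requests before finally answering correctly, and the test then asserts that the retrying SDK client still reads or writes the blob successfully. A simplified sketch of that pattern, condensed from testReadBlobWithRetries in the new test class below (all names and status codes are taken from that test):

    final CountDown countDown = new CountDown(maxRetries);
    httpServer.createContext("/download/storage/v1/b/bucket/o/blob", exchange -> {
        if (countDown.countDown()) {
            // countdown exhausted: serve the blob on the last allowed attempt
            exchange.sendResponseHeaders(RestStatus.OK.getStatus(), bytes.length);
            exchange.getResponseBody().write(bytes);
        } else {
            // earlier attempts fail with a retryable 500
            exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1);
        }
        exchange.close();
    });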
Relates #45963 --- ...CloudStorageBlobContainerRetriesTests.java | 433 ++++++++++++++++++ ...eCloudStorageBlobStoreRepositoryTests.java | 158 ++----- .../repositories/gcs/TestUtils.java | 158 +++++++ 3 files changed, 626 insertions(+), 123 deletions(-) create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java new file mode 100644 index 00000000000..714ea968ff0 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -0,0 +1,433 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.gcs; + +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.StorageException; +import com.google.cloud.storage.StorageOptions; +import com.sun.net.httpserver.HttpContext; +import com.sun.net.httpserver.HttpServer; +import org.apache.http.HttpStatus; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; +import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; +import org.threeten.bp.Duration; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.SocketTimeoutException; +import 
java.nio.file.NoSuchFileException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; +import static org.elasticsearch.repositories.gcs.TestUtils.createServiceAccount; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +@SuppressForbidden(reason = "use a http server") +public class GoogleCloudStorageBlobContainerRetriesTests extends ESTestCase { + + private HttpServer httpServer; + + private String httpServerUrl() { + assertThat(httpServer, notNullValue()); + InetSocketAddress address = httpServer.getAddress(); + return "http://" + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); + } + + @Before + public void setUp() throws Exception { + httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + super.setUp(); + } + + @After + public void tearDown() throws Exception { + httpServer.stop(0); + super.tearDown(); + } + + private BlobContainer createBlobContainer(final int maxRetries, final @Nullable TimeValue readTimeout) { + final Settings.Builder clientSettings = Settings.builder(); + final String client = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + clientSettings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(client).getKey(), httpServerUrl()); + clientSettings.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace(client).getKey(), httpServerUrl() + "/token"); + if (readTimeout != null) { + clientSettings.put(READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(client).getKey(), readTimeout); + } + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(client).getKey(), createServiceAccount(random())); + clientSettings.setSecureSettings(secureSettings); + + final GoogleCloudStorageService service = new GoogleCloudStorageService() { + @Override + StorageOptions createStorageOptions(final GoogleCloudStorageClientSettings clientSettings, + final HttpTransportOptions httpTransportOptions) { + StorageOptions options = super.createStorageOptions(clientSettings, httpTransportOptions); + return options.toBuilder() + .setRetrySettings(RetrySettings.newBuilder() + .setTotalTimeout(options.getRetrySettings().getTotalTimeout()) + .setInitialRetryDelay(Duration.ofMillis(10L)) + .setRetryDelayMultiplier(options.getRetrySettings().getRetryDelayMultiplier()) + .setMaxRetryDelay(Duration.ofSeconds(1L)) + 
.setMaxAttempts(maxRetries) + .setJittered(false) + .setInitialRpcTimeout(options.getRetrySettings().getInitialRpcTimeout()) + .setRpcTimeoutMultiplier(options.getRetrySettings().getRpcTimeoutMultiplier()) + .setMaxRpcTimeout(options.getRetrySettings().getMaxRpcTimeout()) + .build()) + .build(); + } + }; + service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(clientSettings.build())); + + final List httpContexts = Arrays.asList( + // Auth + httpServer.createContext("/token", exchange -> { + byte[] response = ("{\"access_token\":\"foo\",\"token_type\":\"Bearer\",\"expires_in\":3600}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, response.length); + exchange.getResponseBody().write(response); + exchange.close(); + }), + // Does bucket exists? + httpServer.createContext("/storage/v1/b/bucket", exchange -> { + byte[] response = ("{\"kind\":\"storage#bucket\",\"name\":\"bucket\",\"id\":\"0\"}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json; charset=utf-8"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, response.length); + exchange.getResponseBody().write(response); + exchange.close(); + }) + ); + + final GoogleCloudStorageBlobStore blobStore = new GoogleCloudStorageBlobStore("bucket", client, service); + httpContexts.forEach(httpContext -> httpServer.removeContext(httpContext)); + + return new GoogleCloudStorageBlobContainer(BlobPath.cleanPath(), blobStore); + } + + public void testReadNonexistentBlobThrowsNoSuchFileException() { + final BlobContainer blobContainer = createBlobContainer(between(1, 5), null); + final Exception exception = expectThrows(NoSuchFileException.class, + () -> Streams.readFully(blobContainer.readBlob("read_nonexistent_blob"))); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("blob [read_nonexistent_blob] does not exist")); + } + + public void testReadBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(2, 10); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_max_retries", exchange -> { + Streams.readFully(exchange.getRequestBody()); + if (countDown.countDown()) { + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), bytes.length); + exchange.getResponseBody().write(bytes); + exchange.close(); + return; + } + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + if (randomBoolean()) { + exchange.close(); + } + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries, TimeValue.timeValueMillis(between(100, 500))); + try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); + assertThat(countDown.isCountedDown(), is(true)); + } + } + + public void testReadBlobWithReadTimeouts() { + final int maxRetries = randomIntBetween(1, 3); + final BlobContainer blobContainer = createBlobContainer(maxRetries, TimeValue.timeValueMillis(between(100, 200))); + + // HTTP server does not send a response + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_unresponsive", exchange -> {}); + + StorageException storageException = expectThrows(StorageException.class, + () -> 
Streams.readFully(blobContainer.readBlob("read_blob_unresponsive"))); + assertThat(storageException.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + assertThat(storageException.getCause(), instanceOf(SocketTimeoutException.class)); + + // HTTP server sends a partial response + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_incomplete", exchange -> { + exchange.getResponseHeaders().add("Content-Type", "text/plain; charset=utf-8"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, bytes.length); + final int bytesToSend = randomIntBetween(0, bytes.length - 1); + if (bytesToSend > 0) { + exchange.getResponseBody().write(bytes, 0, bytesToSend); + } + if (randomBoolean()) { + exchange.getResponseBody().flush(); + } + }); + + storageException = expectThrows(StorageException.class, () -> { + try (InputStream stream = blobContainer.readBlob("read_blob_incomplete")) { + Streams.readFully(stream); + } + }); + assertThat(storageException.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + assertThat(storageException.getCause(), instanceOf(SocketTimeoutException.class)); + } + + public void testWriteBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(2, 10); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> { + assertThat(exchange.getRequestURI().getQuery(), containsString("uploadType=multipart")); + if (countDown.countDown()) { + Optional> content = TestUtils.parseMultipartRequestBody(exchange.getRequestBody()); + assertThat(content.isPresent(), is(true)); + assertThat(content.get().v1(), equalTo("write_blob_max_retries")); + if (Objects.deepEquals(bytes, content.get().v2().array())) { + byte[] response = ("{\"bucket\":\"bucket\",\"name\":\"" + content.get().v1() + "\"}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + } else { + exchange.sendResponseHeaders(HttpStatus.SC_BAD_REQUEST, -1); + } + exchange.close(); + return; + } + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, Math.max(1, bytes.length - 1))]); + } else { + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + } + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries, null); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + } + assertThat(countDown.isCountedDown(), is(true)); + } + + public void testWriteBlobWithReadTimeouts() { + final byte[] bytes = randomByteArrayOfLength(randomIntBetween(10, 128)); + final TimeValue readTimeout = TimeValue.timeValueMillis(randomIntBetween(100, 500)); + final BlobContainer blobContainer = createBlobContainer(1, readTimeout); + + // HTTP server does not send a response + httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> { + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, bytes.length - 1)]); + } else { + Streams.readFully(exchange.getRequestBody()); + } + 
} + }); + + Exception exception = expectThrows(StorageException.class, () -> { + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_timeout", stream, bytes.length, false); + } + }); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + + assertThat(exception.getCause(), instanceOf(SocketTimeoutException.class)); + assertThat(exception.getCause().getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + } + + public void testWriteLargeBlob() throws IOException { + // See {@link BaseWriteChannel#DEFAULT_CHUNK_SIZE} + final int defaultChunkSize = 8 * 256 * 1024; + final int nbChunks = randomIntBetween(3, 5); + final int lastChunkSize = randomIntBetween(1, defaultChunkSize - 1); + final int totalChunks = nbChunks + 1; + final byte[] data = randomBytes(defaultChunkSize * nbChunks + lastChunkSize); + assertThat(data.length, greaterThan(GoogleCloudStorageBlobStore.LARGE_BLOB_THRESHOLD_BYTE_SIZE)); + + logger.debug("resumable upload is composed of [{}] total chunks ([{}] chunks of length [{}] and last chunk of length [{}]", + totalChunks, nbChunks, defaultChunkSize, lastChunkSize); + + final int nbErrors = 2; // we want all requests to fail at least once + final AtomicInteger countInits = new AtomicInteger(nbErrors); + final AtomicInteger countUploads = new AtomicInteger(nbErrors * totalChunks); + final AtomicBoolean allow410Gone = new AtomicBoolean(randomBoolean()); + final AtomicBoolean allowReadTimeout = new AtomicBoolean(rarely()); + final int wrongChunk = randomIntBetween(1, totalChunks); + + final AtomicReference sessionUploadId = new AtomicReference<>(UUIDs.randomBase64UUID()); + logger.debug("starting with resumable upload id [{}]", sessionUploadId.get()); + + httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> { + final Map params = new HashMap<>(); + RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + assertThat(params.get("uploadType"), equalTo("resumable")); + + if ("POST".equals(exchange.getRequestMethod())) { + assertThat(params.get("name"), equalTo("write_large_blob")); + if (countInits.decrementAndGet() <= 0) { + byte[] response = Streams.readFully(exchange.getRequestBody()).utf8ToString().getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.getResponseHeaders().add("Location", httpServerUrl() + + "/upload/storage/v1/b/bucket/o?uploadType=resumable&upload_id=" + sessionUploadId.get()); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + exchange.close(); + return; + } + if (allowReadTimeout.get()) { + assertThat(wrongChunk, greaterThan(0)); + return; + } + + } else if ("PUT".equals(exchange.getRequestMethod())) { + final String uploadId = params.get("upload_id"); + if (uploadId.equals(sessionUploadId.get()) == false) { + logger.debug("session id [{}] is gone", uploadId); + assertThat(wrongChunk, greaterThan(0)); + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(HttpStatus.SC_GONE, -1); + exchange.close(); + return; + } + + if (countUploads.get() == (wrongChunk * nbErrors)) { + if (allowReadTimeout.compareAndSet(true, false)) { + assertThat(wrongChunk, greaterThan(0)); + return; + } + if (allow410Gone.compareAndSet(true, false)) { + final String newUploadId = UUIDs.randomBase64UUID(random()); + logger.debug("chunk [{}] gone, updating 
session ids [{} -> {}]", wrongChunk, sessionUploadId.get(), newUploadId); + sessionUploadId.set(newUploadId); + + // we must reset the counters because the whole object upload will be retried + countInits.set(nbErrors); + countUploads.set(nbErrors * totalChunks); + + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(HttpStatus.SC_GONE, -1); + exchange.close(); + return; + } + } + + final String range = exchange.getRequestHeaders().getFirst("Content-Range"); + assertTrue(Strings.hasLength(range)); + + if (countUploads.decrementAndGet() % 2 == 0) { + final ByteArrayOutputStream requestBody = new ByteArrayOutputStream(); + final long bytesRead = Streams.copy(exchange.getRequestBody(), requestBody); + assertThat(Math.toIntExact(bytesRead), anyOf(equalTo(defaultChunkSize), equalTo(lastChunkSize))); + + final int rangeStart = TestUtils.getContentRangeStart(range); + final int rangeEnd = TestUtils.getContentRangeEnd(range); + assertThat(rangeEnd + 1 - rangeStart, equalTo(Math.toIntExact(bytesRead))); + assertArrayEquals(Arrays.copyOfRange(data, rangeStart, rangeEnd + 1), requestBody.toByteArray()); + + final Integer limit = TestUtils.getContentRangeLimit(range); + if (limit != null) { + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); + exchange.close(); + return; + } else { + exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", rangeStart, rangeEnd)); + exchange.getResponseHeaders().add("Content-Length", "0"); + exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); + exchange.close(); + return; + } + } + } + + // read all the request body, otherwise the SDK client throws a non-retryable StorageException + Streams.readFully(exchange.getRequestBody()); + if (randomBoolean()) { + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + } + exchange.close(); + }); + + final TimeValue readTimeout = allowReadTimeout.get() ? TimeValue.timeValueSeconds(3) : null; + + final BlobContainer blobContainer = createBlobContainer(nbErrors + 1, readTimeout); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { + blobContainer.writeBlob("write_large_blob", stream, data.length, false); + } + + assertThat(countInits.get(), equalTo(0)); + assertThat(countUploads.get(), equalTo(0)); + assertThat(allow410Gone.get(), is(false)); + } + + private static byte[] randomBlobContent() { + return randomByteArrayOfLength(randomIntBetween(1, frequently() ? 
512 : 1 << 20)); // rarely up to 1mb + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 914746f7830..0fa9dfe9102 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -38,8 +39,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.Repository; @@ -54,9 +53,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; -import java.security.KeyPairGenerator; -import java.util.Arrays; -import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -64,13 +60,12 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.UUID; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.zip.GZIPInputStream; import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; @@ -78,6 +73,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSetting import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository.BUCKET; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository.CLIENT_NAME; +import static org.hamcrest.Matchers.lessThanOrEqualTo; @SuppressForbidden(reason = "this test uses a HttpServer to emulate a Google Cloud Storage endpoint") public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTestCase { @@ -119,7 +115,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe @Override protected Settings nodeSettings(int nodeOrdinal) { if (serviceAccount == null) { - serviceAccount = createServiceAccount(); + serviceAccount = TestUtils.createServiceAccount(random()); } final Settings.Builder settings = Settings.builder(); @@ -218,31 +214,6 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe } } - private static byte[] createServiceAccount() { - try { - final KeyPairGenerator 
keyPairGenerator = KeyPairGenerator.getInstance("RSA"); - keyPairGenerator.initialize(1024); - final String privateKey = Base64.getEncoder().encodeToString(keyPairGenerator.generateKeyPair().getPrivate().getEncoded()); - - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - try (XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), out)) { - builder.startObject(); - { - builder.field("type", "service_account"); - builder.field("project_id", getTestClass().getName().toLowerCase(Locale.ROOT)); - builder.field("private_key_id", UUID.randomUUID().toString()); - builder.field("private_key", "-----BEGIN PRIVATE KEY-----\n" + privateKey + "\n-----END PRIVATE KEY-----\n"); - builder.field("client_email", "elastic@appspot.gserviceaccount.com"); - builder.field("client_id", String.valueOf(randomNonNegativeLong())); - } - builder.endObject(); - } - return out.toByteArray(); - } catch (Exception e) { - throw new AssertionError("Unable to create service account file", e); - } - } - /** * Minimal HTTP handler that acts as a Google Cloud Storage compliant server */ @@ -345,65 +316,16 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe exchange.getResponseBody().write(response); } else if (Regex.simpleMatch("POST /upload/storage/v1/b/bucket/*uploadType=multipart*", request)) { - try (BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(exchange.getRequestBody()))) { - byte[] response = new byte[0]; - String blob = null; - int read; - while ((read = in.read()) != -1) { - boolean markAndContinue = false; - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - do { // search next consecutive {carriage return, new line} chars and stop - if ((char) read == '\r') { - int next = in.read(); - if (next != -1) { - if (next == '\n') { - break; - } - out.write(read); - out.write(next); - continue; - } - } - out.write(read); - } while ((read = in.read()) != -1); + Optional> content = TestUtils.parseMultipartRequestBody(exchange.getRequestBody()); + if (content.isPresent()) { + blobs.put(content.get().v1(), content.get().v2()); - final String line = new String(out.toByteArray(), UTF_8); - if (line.length() == 0 || line.equals("\r\n") || line.startsWith("--") - || line.toLowerCase(Locale.ROOT).startsWith("content")) { - markAndContinue = true; - } else if (line.startsWith("{\"bucket\":\"bucket\"")) { - markAndContinue = true; - Matcher matcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); - if (matcher.find()) { - blob = matcher.group(1); - response = line.getBytes(UTF_8); - } - } - if (markAndContinue) { - in.mark(Integer.MAX_VALUE); - continue; - } - } - if (blob != null) { - in.reset(); - try (ByteArrayOutputStream binary = new ByteArrayOutputStream()) { - while ((read = in.read()) != -1) { - binary.write(read); - } - binary.flush(); - byte[] tmp = binary.toByteArray(); - // removes the trailing end "\r\n--__END_OF_PART__--\r\n" which is 23 bytes long - blobs.put(blob, new BytesArray(Arrays.copyOf(tmp, tmp.length - 23))); - - exchange.getResponseHeaders().add("Content-Type", "application/json"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); - - } finally { - blob = null; - } - } - } + byte[] response = ("{\"bucket\":\"bucket\",\"name\":\"" + content.get().v1() + "\"}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + 
exchange.getResponseBody().write(response); + } else { + exchange.sendResponseHeaders(RestStatus.BAD_REQUEST.getStatus(), -1); } } else if (Regex.simpleMatch("POST /upload/storage/v1/b/bucket/*uploadType=resumable*", request)) { @@ -426,41 +348,31 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); final String blobName = params.get("test_blob_name"); + byte[] blob = blobs.get(blobName).array(); + assertNotNull(blob); + final String range = exchange.getRequestHeaders().getFirst("Content-Range"); - assert Strings.hasLength(range); + final Integer limit = TestUtils.getContentRangeLimit(range); + final int start = TestUtils.getContentRangeStart(range); + final int end = TestUtils.getContentRangeEnd(range); - Matcher matcher = Pattern.compile("bytes ([^/]*)/([0-9\\*]*)").matcher(range); - if (matcher.find()) { - String bytes = matcher.group(1); - String limit = matcher.group(2); - byte[] blob = blobs.get(blobName).array(); - assert blob != null; - // client is uploading a chunk - matcher = Pattern.compile("([0-9]*)-([0-9]*)").matcher(bytes); - assert matcher.find(); + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + long bytesRead = Streams.copy(exchange.getRequestBody(), out); + int length = Math.max(end + 1, limit != null ? limit : 0); + assertThat((int) bytesRead, lessThanOrEqualTo(length)); + if (length > blob.length) { + blob = ArrayUtil.growExact(blob, length); + } + System.arraycopy(out.toByteArray(), 0, blob, start, Math.toIntExact(bytesRead)); + blobs.put(blobName, new BytesArray(blob)); - int end = Integer.parseInt(matcher.group(2)); - int start = Integer.parseInt(matcher.group(1)); - - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - long count = Streams.copy(exchange.getRequestBody(), out); - int length = Math.max(end + 1, "*".equals(limit) ? 0 : Integer.parseInt(limit)); - assert count <= length; - if (length > blob.length) { - blob = ArrayUtil.growExact(blob, length); - } - assert blob.length >= end; - System.arraycopy(out.toByteArray(), 0, blob, start, Math.toIntExact(count)); - blobs.put(blobName, new BytesArray(blob)); - - if ("*".equals(limit)) { - exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", start, end)); - exchange.getResponseHeaders().add("Content-Length", "0"); - exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); - } else { - assert blob.length == Integer.parseInt(limit); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); - } + if (limit == null) { + exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", start, end)); + exchange.getResponseHeaders().add("Content-Length", "0"); + exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); + } else { + assertThat(limit, lessThanOrEqualTo(blob.length)); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); } } else { exchange.sendResponseHeaders(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), -1); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java new file mode 100644 index 00000000000..a6ae0578fbd --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.gcs; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.BufferedInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.security.KeyPairGenerator; +import java.util.Arrays; +import java.util.Base64; +import java.util.Locale; +import java.util.Optional; +import java.util.Random; +import java.util.UUID; +import java.util.function.BiFunction; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; + +import static java.nio.charset.StandardCharsets.UTF_8; + +final class TestUtils { + + private TestUtils() {} + + /** + * Creates a random Service Account file for testing purpose + */ + static byte[] createServiceAccount(final Random random) { + try { + final KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); + keyPairGenerator.initialize(1024); + final String privateKey = Base64.getEncoder().encodeToString(keyPairGenerator.generateKeyPair().getPrivate().getEncoded()); + + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), out)) { + builder.startObject(); + { + builder.field("type", "service_account"); + builder.field("project_id", "test"); + builder.field("private_key_id", UUID.randomUUID().toString()); + builder.field("private_key", "-----BEGIN PRIVATE KEY-----\n" + privateKey + "\n-----END PRIVATE KEY-----\n"); + builder.field("client_email", "elastic@appspot.gserviceaccount.com"); + builder.field("client_id", String.valueOf(Math.abs(random.nextLong()))); + } + builder.endObject(); + } + return out.toByteArray(); + } catch (Exception e) { + throw new AssertionError("Unable to create service account file", e); + } + } + + static Optional> parseMultipartRequestBody(final InputStream requestBody) throws IOException { + Tuple content = null; + try (BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(requestBody))) { + String name = null; + int read; + while ((read = in.read()) != -1) { + boolean markAndContinue = false; + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + do { // search next consecutive {carriage return, new line} chars and stop + if ((char) read == '\r') { + int next = in.read(); + if (next != -1) { + if (next == '\n') { + break; + } + out.write(read); + out.write(next); + continue; + } + } + out.write(read); + } while ((read = in.read()) != -1); + + final String line = new String(out.toByteArray(), UTF_8); + if (line.length() == 0 || line.equals("\r\n") || line.startsWith("--") + || 
line.toLowerCase(Locale.ROOT).startsWith("content")) {
+ markAndContinue = true;
+ } else if (line.startsWith("{\"bucket\":\"bucket\"")) {
+ markAndContinue = true;
+ Matcher matcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line);
+ if (matcher.find()) {
+ name = matcher.group(1);
+ }
+ }
+ if (markAndContinue) {
+ in.mark(Integer.MAX_VALUE);
+ continue;
+ }
+ }
+ if (name != null) {
+ in.reset();
+ try (ByteArrayOutputStream binary = new ByteArrayOutputStream()) {
+ while ((read = in.read()) != -1) {
+ binary.write(read);
+ }
+ binary.flush();
+ byte[] tmp = binary.toByteArray();
+ // removes the trailing end "\r\n--__END_OF_PART__--\r\n" which is 23 bytes long
+ content = Tuple.tuple(name, new BytesArray(Arrays.copyOf(tmp, tmp.length - 23)));
+ }
+ }
+ }
+ }
+ return Optional.ofNullable(content);
+ }
+
+ private static final Pattern PATTERN_CONTENT_RANGE = Pattern.compile("bytes ([^/]*)/([0-9\\*]*)");
+ private static final Pattern PATTERN_CONTENT_RANGE_BYTES = Pattern.compile("([0-9]*)-([0-9]*)");
+
+ private static Integer parse(final Pattern pattern, final String contentRange, final BiFunction fn) {
+ final Matcher matcher = pattern.matcher(contentRange);
+ if (matcher.matches() == false || matcher.groupCount() != 2) {
+ throw new IllegalArgumentException("Unable to parse content range header");
+ }
+ return fn.apply(matcher.group(1), matcher.group(2));
+ }
+
+ static Integer getContentRangeLimit(final String contentRange) {
+ return parse(PATTERN_CONTENT_RANGE, contentRange, (bytes, limit) -> "*".equals(limit) ? null : Integer.parseInt(limit));
+ }
+
+ static int getContentRangeStart(final String contentRange) {
+ return parse(PATTERN_CONTENT_RANGE, contentRange,
+ (bytes, limit) -> parse(PATTERN_CONTENT_RANGE_BYTES, bytes,
+ (start, end) -> Integer.parseInt(start)));
+ }
+
+ static int getContentRangeEnd(final String contentRange) {
+ return parse(PATTERN_CONTENT_RANGE, contentRange,
+ (bytes, limit) -> parse(PATTERN_CONTENT_RANGE_BYTES, bytes,
+ (start, end) -> Integer.parseInt(end)));
+ }
+}

From 618efcfcf94a5803b4974113a9e572b7bb23011f Mon Sep 17 00:00:00 2001
From: maidoo
Date: Tue, 24 Sep 2019 16:01:07 +0800
Subject: [PATCH 15/94] Add submitDeleteByQueryTask method to RestHighLevelClient (#46833)

The HLRC has methods for reindex that allow triggering it either as a blocking call (RestHighLevelClient.reindex) or as a submitted async task (RestHighLevelClient.submitReindexTask). Delete by query, however, only has a RestHighLevelClient.deleteByQuery method (and its async counterpart), but no RestHighLevelClient.submitDeleteByQueryTask.
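The intended usage of the new method, mirroring the ReindexIT test added in this change, is:

    DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
    deleteByQueryRequest.indices(sourceIndex);
    deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
    deleteByQueryRequest.setRefresh(true);

    // returns immediately with a task id instead of blocking until the deletion completes
    TaskSubmissionResponse submission =
        highLevelClient().submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT);
    String taskId = submission.getTask();

The returned task id can then be checked for completion through the tasks API, exactly as for submitReindexTask.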
So add RestHighLevelClient.submitDeleteByQueryTask.

Closes #46395
---
 .../client/RequestConverters.java | 60 +++++++++++--------
 .../client/RestHighLevelClient.java | 15 +++++
 .../org/elasticsearch/client/ReindexIT.java | 41 +++++++++++++
 .../client/RequestConvertersTests.java | 1 +
 4 files changed, 92 insertions(+), 25 deletions(-)

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 2fbfeb21a2e..169fe405e87 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -553,6 +553,10 @@ final class RequestConverters {
 return prepareReindexRequest(reindexRequest, false);
 }
+ static Request submitDeleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
+ return prepareDeleteByQueryRequest(deleteByQueryRequest, false);
+ }
+
 private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
 String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
 Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@@ -572,6 +576,36 @@ final class RequestConverters {
 return request;
 }
+ private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest,
+ boolean waitForCompletion) throws IOException {
+ String endpoint =
+ endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ Params params = new Params()
+ .withRouting(deleteByQueryRequest.getRouting())
+ .withRefresh(deleteByQueryRequest.isRefresh())
+ .withTimeout(deleteByQueryRequest.getTimeout())
+ .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
+ .withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond())
+ .withIndicesOptions(deleteByQueryRequest.indicesOptions())
+ .withWaitForCompletion(waitForCompletion);
+ if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
+ params.putParam("conflicts", "proceed");
+ }
+ if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
+ params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
+ }
+ if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
+ params.putParam("scroll", deleteByQueryRequest.getScrollTime());
+ }
+ if (deleteByQueryRequest.getMaxDocs() > 0) {
+ params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
+ }
+ request.addParameters(params.asMap());
+ request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
 static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException {
 String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
@@ -602,31 +636,7 @@ final class RequestConverters {
 }
 static Request deleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
- String endpoint =
- endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
- Request request = new Request(HttpPost.METHOD_NAME, endpoint);
- Params params = new Params()
- .withRouting(deleteByQueryRequest.getRouting())
- .withRefresh(deleteByQueryRequest.isRefresh())
- 
.withTimeout(deleteByQueryRequest.getTimeout()) - .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards()) - .withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond()) - .withIndicesOptions(deleteByQueryRequest.indicesOptions()); - if (deleteByQueryRequest.isAbortOnVersionConflict() == false) { - params.putParam("conflicts", "proceed"); - } - if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) { - params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize())); - } - if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) { - params.putParam("scroll", deleteByQueryRequest.getScrollTime()); - } - if (deleteByQueryRequest.getMaxDocs() > 0) { - params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs())); - } - request.addParameters(params.asMap()); - request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; + return prepareDeleteByQueryRequest(deleteByQueryRequest, true); } static Request rethrottleReindex(RethrottleRequest rethrottleRequest) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 0992edd936c..65d517231f2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -590,6 +590,21 @@ public class RestHighLevelClient implements Closeable { ); } + /** + * Submits a delete by query task + * See + * Delete By Query API on elastic.co + * @param deleteByQueryRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the submission response + */ + public final TaskSubmissionResponse submitDeleteByQueryTask(DeleteByQueryRequest deleteByQueryRequest, + RequestOptions options) throws IOException { + return performRequestAndParseEntity( + deleteByQueryRequest, RequestConverters::submitDeleteByQuery, options, TaskSubmissionResponse::fromXContent, emptySet() + ); + } + /** * Asynchronously executes a delete by query request. 
* See diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java index 90cfa3a9388..256e38da858 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java @@ -436,6 +436,47 @@ public class ReindexIT extends ESRestHighLevelClientTestCase { } } + public void testDeleteByQueryTask() throws Exception { + final String sourceIndex = "source456"; + { + // Prepare + Settings settings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + createIndex(sourceIndex, settings); + assertEquals( + RestStatus.OK, + highLevelClient().bulk( + new BulkRequest() + .add(new IndexRequest(sourceIndex).id("1") + .source(Collections.singletonMap("foo", 1), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("2") + .source(Collections.singletonMap("foo", 2), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("3") + .source(Collections.singletonMap("foo", 3), XContentType.JSON)) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), + RequestOptions.DEFAULT + ).status() + ); + } + { + // tag::submit-delete_by_query-task + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(); + deleteByQueryRequest.indices(sourceIndex); + deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1")); + deleteByQueryRequest.setRefresh(true); + + TaskSubmissionResponse deleteByQuerySubmission = highLevelClient() + .submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); + + String taskId = deleteByQuerySubmission.getTask(); + // end::submit-delete_by_query-task + + assertBusy(checkCompletionStatus(client(), taskId)); + } + } + private static TaskId findTaskToRethrottle(String actionName) throws IOException { long start = System.nanoTime(); ListTasksRequest request = new ListTasksRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 57f6a579c70..106a58edebf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -582,6 +582,7 @@ public class RequestConvertersTests extends ESTestCase { } setRandomIndicesOptions(deleteByQueryRequest::setIndicesOptions, deleteByQueryRequest::indicesOptions, expectedParams); setRandomTimeout(deleteByQueryRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams); + expectedParams.put("wait_for_completion", Boolean.TRUE.toString()); Request request = RequestConverters.deleteByQuery(deleteByQueryRequest); StringJoiner joiner = new StringJoiner("/", "/", ""); joiner.add(String.join(",", deleteByQueryRequest.indices())); From 56224068d46f87654b434431c59cc6454f8e7531 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Tue, 24 Sep 2019 09:23:25 +0100 Subject: [PATCH 16/94] Release highlights for 7.4.0 (#46963) Co-Authored-By: James Rodewig --- .../release-notes/highlights-7.4.0.asciidoc | 156 ++++++++++++++++++ .../release-notes/highlights.asciidoc | 6 +- 2 files changed, 160 insertions(+), 2 deletions(-) create mode 100644 docs/reference/release-notes/highlights-7.4.0.asciidoc diff --git a/docs/reference/release-notes/highlights-7.4.0.asciidoc 
b/docs/reference/release-notes/highlights-7.4.0.asciidoc new file mode 100644 index 00000000000..03a1b6dcf4f --- /dev/null +++ b/docs/reference/release-notes/highlights-7.4.0.asciidoc @@ -0,0 +1,156 @@ +[[release-highlights-7.4.0]] +== 7.4.0 release highlights +++++ +7.4.0 +++++ + +//NOTE: The notable-highlights tagged regions are re-used in the +//Installation and Upgrade Guide + +// tag::notable-highlights[] +[float] +==== Results pinning + +You can use the new <> +to define the first records +(and the order in which they are returned) +in a result set directly within {es}. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== New `shape` field type + +A new <> field type has been added, +which allows you to position and query shapes +in a geometry of your choosing. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Circle ingest processor + +A new <> has been added, +which translates circles into regular polygons (bounded by the circles). +This makes ingesting, indexing, searching, and aggregating circles both easy and efficient. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Aggregations on range fields + +The <> +and <> +aggregations now support the <> field type. + +Range aggregations are useful +when counting ranges that overlap with specific buckets +(e.g. the number of phone calls that took place during a specific minute). + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Cumulative cardinality aggregation + +A new <> +has been added +as part of our ongoing effort to provide advanced aggregations. + +You can use this new pipeline aggregation +to calculate a net-new total of document occurrences +within a given time range. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Snapshot lifecycle management + +We’re introducing <>, +which allows an administrator to define policies, +via API or {kibana-ref}/index-lifecycle-policies.html[{kib} UI], +that manage when and how often snapshots are taken. +You can use SLM +to ensure that appropriate, recent backups are ready +if disaster strikes +or you need to restore {es} data. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== API key management + +New {stack-ov}/security-privileges.html[cluster privileges] to manage API keys have been added, +allowing cluster administrators to manage everything, +and regular users to manage their own keys. +Users can create API keys +and use them to provide long-term credentials +while interacting with {es}. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== TLS settings for email notifications + +Notifications may contain sensitive information that must be protected over the wire. This requires that communication with the mail server is encrypted and authenticated properly. +{es} now supports custom <> for email notifications, +allowing secure connections to servers with custom security configuration. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Automatic query cancellation + +{es} now automatically terminates queries +sent through the `_search` endpoint +when the initiating connection is closed. + +// end::notable-highlights[] + +// tag::notable-highlights[] +[float] +==== Support for AdoptOpenJDK + +AdoptOpenJDK 13 is now supported and shipped with {es} as the pre-bundled JDK. 
+ 
+ If you want to use your own JDK,
+ you can still do so by setting `JAVA_HOME` before starting Elasticsearch.
+ 
+ The availability of a notarized AdoptOpenJDK package
+ (per the new requirements for software running on macOS Catalina)
+ facilitates notarization of {es} for continued support on macOS.
+ 
+ // end::notable-highlights[]
+ 
+ // tag::notable-highlights[]
+ [float]
+ ==== Regression analysis - Experimental
+ 
+ {stack-ov}/dfa-regression.html[Regression analysis] is an experimental machine learning process
+ for estimating the relationships among a number of feature variables and a dependent variable,
+ then making further predictions based on the described relationship.
+ 
+ // end::notable-highlights[]
+ 
+ // tag::notable-highlights[]
+ [float]
+ ==== New vector distance functions for document script scoring - Experimental
+ 
+ Two experimental similarity measurements—
+ Manhattan distance (L1 norm)
+ and Euclidean distance (L2 norm)—
+ have been added.
+ Like the dot product and cosine similarity,
+ the Euclidean and Manhattan distances are provided as <>
+ so that they may be incorporated with other query elements
+ as part of a <> query.
+ 
+ // end::notable-highlights[]
+ 
diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc
index 1b48c8118c6..dbf45641958 100644
--- a/docs/reference/release-notes/highlights.asciidoc
+++ b/docs/reference/release-notes/highlights.asciidoc
@@ -3,9 +3,10 @@
 [partintro]
 --
-This section summarizes the most important changes in each release. For the
-full list, see <> and <>.
+This section summarizes the most important changes in each release. For the
+full list, see <> and <>.
 
+* <>
 * <>
 * <>
 * <>
@@ -13,6 +14,7 @@ full list, see <> and <>.
 --
+include::highlights-7.4.0.asciidoc[]
 include::highlights-7.3.0.asciidoc[]
 include::highlights-7.2.0.asciidoc[]
 include::highlights-7.1.0.asciidoc[]

From 98e6bb4d01de21788e36a88e0dc3d67ed5a25a37 Mon Sep 17 00:00:00 2001
From: Ioannis Kakavas
Date: Tue, 24 Sep 2019 12:47:56 +0300
Subject: [PATCH 17/94] Workaround JDK-8213202 in SSLClientAuthTests (#46995)

This change works around JDK-8213202, which is a bug related to TLSv1.3 session resumption before JDK 11.0.3 that occurs when there are multiple concurrent sessions being established. Nodes connecting to each other will trigger this bug when client authentication is disabled, which is the case for SSLClientAuthTests.

Backport of #46680
---
 .../java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java
index e5fb9c71831..37cf17792a7 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java
@@ -96,6 +96,10 @@ public class SSLClientAuthTests extends SecurityIntegTestCase {
 return builder // invert the require auth settings
 .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.NONE)
+ // Due to the TLSv1.3 bug with session resumption when client authentication is not
+ // used, we need to set the protocols since we disabled client auth for transport
+ // to avoid failures on pre 11.0.3 JDKs. 
See #getProtocols + .putList("xpack.security.transport.ssl.supported_protocols", getProtocols()) .put("xpack.security.http.ssl.enabled", true) .put("xpack.security.http.ssl.client_authentication", SSLClientAuth.REQUIRED) .build(); From 6943a3101fea1a4a438323c6ec98cf9650e2245c Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 24 Sep 2019 12:31:13 +0100 Subject: [PATCH 18/94] Cut PersistedState interface from GatewayMetaState (#46655) Today `GatewayMetaState` implements `PersistedState` but it's an error to use it as a `PersistedState` before it's been started, or if the node is master-ineligible. It also holds some fields that are meaningless on nodes that do not persist their states. Finally, it takes responsibility for both loading the original cluster state and some of the high-level logic for writing the cluster state back to disk. This commit addresses these concerns by introducing a more specific `PersistedState` implementation for use on master-eligible nodes which is only instantiated if and when it's appropriate. It also moves the fields and high-level persistence logic into a new `IncrementalClusterStateWriter` with a more appropriate lifecycle. Follow-up to #46326 and #46532 Relates #47001 --- .../gateway/GatewayMetaState.java | 603 +++++------------- .../IncrementalClusterStateWriter.java | 384 +++++++++++ .../java/org/elasticsearch/node/Node.java | 4 +- .../GatewayMetaStatePersistedStateTests.java | 48 +- .../gateway/GatewayMetaStateTests.java | 397 +----------- .../IncrementalClusterStateWriterTests.java | 429 +++++++++++++ .../AbstractCoordinatorTestCase.java | 10 +- .../gateway/MockGatewayMetaState.java | 16 +- 8 files changed, 1003 insertions(+), 888 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java create mode 100644 server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index c6e9182fd8f..f9433ee6059 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -29,6 +29,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.coordination.CoordinationState.PersistedState; import org.elasticsearch.cluster.coordination.InMemoryPersistedState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -36,8 +37,6 @@ import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; @@ -49,124 +48,104 @@ import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; -import java.util.List; import java.util.Map; 
-import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.UnaryOperator; /** - * This class is responsible for storing/retrieving metadata to/from disk. - * When instance of this class is created, constructor ensures that this version is compatible with state stored on disk and performs - * state upgrade if necessary. Also it checks that atomic move is supported on the filesystem level, because it's a must for metadata - * store algorithm. - * Please note that the state being loaded when constructing the instance of this class is NOT the state that will be used as a - * {@link ClusterState#metaData()}. Instead when node is starting up, it calls {@link #getMetaData()} method and if this node is - * elected as master, it requests metaData from other master eligible nodes. After that, master node performs re-conciliation on the - * gathered results, re-creates {@link ClusterState} and broadcasts this state to other nodes in the cluster. + * Loads (and maybe upgrades) cluster metadata at startup, and persistently stores cluster metadata for future restarts. + * + * When started, ensures that this version is compatible with the state stored on disk, and performs a state upgrade if necessary. Note that + * the state being loaded when constructing the instance of this class is not necessarily the state that will be used as {@link + * ClusterState#metaData()} because it might be stale or incomplete. Master-eligible nodes must perform an election to find a complete and + * non-stale state, and master-ineligible nodes receive the real cluster state from the elected master after joining the cluster. */ -public class GatewayMetaState implements PersistedState { - protected static final Logger logger = LogManager.getLogger(GatewayMetaState.class); +public class GatewayMetaState { + private static final Logger logger = LogManager.getLogger(GatewayMetaState.class); - private final MetaStateService metaStateService; - private final Settings settings; - - // On master-eligible Zen2 nodes, we use this very object for the PersistedState (so that the state is actually persisted); on other - // nodes we use an InMemoryPersistedState instead and persist using a cluster applier if needed. In all cases it's an error to try and - // use this object as a PersistedState before calling start(). TODO stop implementing PersistedState at the top level. + // Set by calling start() private final SetOnce persistedState = new SetOnce<>(); - // on master-eligible nodes we call updateClusterState under the Coordinator's mutex; on master-ineligible data nodes we call - // updateClusterState on the (unique) cluster applier thread; on other nodes we never call updateClusterState. In all cases there's no - // need to synchronize access to these variables. 
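// Illustrative sketch, not part of the patch: the new startup contract in method form.
// The parameters stand in for the services the node injector supplies, as in Node#start.
static PersistedState initPersistence(Settings settings, TransportService transportService,
                                      ClusterService clusterService, MetaStateService metaStateService,
                                      MetaDataIndexUpgradeService metaDataIndexUpgradeService,
                                      MetaDataUpgrader metaDataUpgrader) {
    GatewayMetaState gatewayMetaState = new GatewayMetaState();
    // start() loads (and maybe upgrades) the on-disk state and picks a PersistedState implementation
    gatewayMetaState.start(settings, transportService, clusterService, metaStateService,
        metaDataIndexUpgradeService, metaDataUpgrader);
    // only valid once start() has run; beforehand the internal SetOnce is unset
    return gatewayMetaState.getPersistedState();
}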
- protected Manifest previousManifest; - protected ClusterState previousClusterState; - protected boolean incrementalWrite; - - public GatewayMetaState(Settings settings, MetaStateService metaStateService) { - this.settings = settings; - this.metaStateService = metaStateService; + public PersistedState getPersistedState() { + final PersistedState persistedState = this.persistedState.get(); + assert persistedState != null : "not started"; + return persistedState; } - public void start(TransportService transportService, ClusterService clusterService, - MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) { - assert previousClusterState == null : "should only start once, but already have " + previousClusterState; + public MetaData getMetaData() { + return getPersistedState().getLastAcceptedState().metaData(); + } + + public void start(Settings settings, TransportService transportService, ClusterService clusterService, + MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) { + assert persistedState.get() == null : "should only start once, but already have " + persistedState.get(); + + final Tuple manifestClusterStateTuple; try { - upgradeMetaData(metaDataIndexUpgradeService, metaDataUpgrader); - initializeClusterState(ClusterName.CLUSTER_NAME_SETTING.get(settings)); + upgradeMetaData(settings, metaStateService, metaDataIndexUpgradeService, metaDataUpgrader); + manifestClusterStateTuple = loadStateAndManifest(ClusterName.CLUSTER_NAME_SETTING.get(settings), metaStateService); } catch (IOException e) { throw new ElasticsearchException("failed to load metadata", e); } - incrementalWrite = false; - - applyClusterStateUpdaters(transportService, clusterService); + final IncrementalClusterStateWriter incrementalClusterStateWriter + = new IncrementalClusterStateWriter(metaStateService, manifestClusterStateTuple.v1(), + prepareInitialClusterState(transportService, clusterService, manifestClusterStateTuple.v2())); if (DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings).equals(DiscoveryModule.ZEN_DISCOVERY_TYPE)) { - // only for tests that simulate a mixed Zen1/Zen2 clusters, see Zen1IT - if (isMasterOrDataNode()) { - clusterService.addLowPriorityApplier(this::applyClusterState); + // only for tests that simulate mixed Zen1/Zen2 clusters, see Zen1IT + if (isMasterOrDataNode(settings)) { + clusterService.addLowPriorityApplier(new GatewayClusterApplier(incrementalClusterStateWriter)); } - persistedState.set(new InMemoryPersistedState(getCurrentTerm(), getLastAcceptedState())); + persistedState.set(new InMemoryPersistedState(manifestClusterStateTuple.v1().getCurrentTerm(), manifestClusterStateTuple.v2())); + } else if (DiscoveryNode.isMasterNode(settings) == false) { + if (DiscoveryNode.isDataNode(settings)) { + // Master-eligible nodes persist index metadata for all indices regardless of whether they hold any shards or not. It's + // vitally important to the safety of the cluster coordination system that master-eligible nodes persist this metadata when + // _accepting_ the cluster state (i.e. before it is committed). This persistence happens on the generic threadpool. + // + // In contrast, master-ineligible data nodes only persist the index metadata for shards that they hold. When all shards of + // an index are moved off such a node the IndicesStore is responsible for removing the corresponding index directory, + // including the metadata, and does so on the cluster applier thread. 
+ // + // This presents a problem: if a shard is unassigned from a node and then reassigned back to it again then there is a race + // between the IndicesStore deleting the index folder and the CoordinationState concurrently trying to write the updated + // metadata into it. We could probably solve this with careful synchronization, but in fact there is no need. The persisted + // state on master-ineligible data nodes is mostly ignored - it's only there to support dangling index imports, which is + // inherently unsafe anyway. Thus we can safely delay metadata writes on master-ineligible data nodes until applying the + // cluster state, which is what this does: + clusterService.addLowPriorityApplier(new GatewayClusterApplier(incrementalClusterStateWriter)); + } + + // Master-ineligible nodes do not need to persist the cluster state when accepting it because they are not in the voting + // configuration, so it's ok if they have a stale or incomplete cluster state when restarted. We track the latest cluster state + // in memory instead. + persistedState.set(new InMemoryPersistedState(manifestClusterStateTuple.v1().getCurrentTerm(), manifestClusterStateTuple.v2())); } else { - if (DiscoveryNode.isMasterNode(settings) == false) { - if (DiscoveryNode.isDataNode(settings)) { - // Master-eligible nodes persist index metadata for all indices regardless of whether they hold any shards or not. It's - // vitally important to the safety of the cluster coordination system that master-eligible nodes persist this metadata - // when _accepting_ the cluster state (i.e. before it is committed). This persistence happens on the generic threadpool. - // - // In contrast, master-ineligible data nodes only persist the index metadata for shards that they hold. When all shards - // of an index are moved off such a node the IndicesStore is responsible for removing the corresponding index directory, - // including the metadata, and does so on the cluster applier thread. - // - // This presents a problem: if a shard is unassigned from a node and then reassigned back to it again then there is a - // race between the IndicesStore deleting the index folder and the CoordinationState concurrently trying to write the - // updated metadata into it. We could probably solve this with careful synchronization, but in fact there is no need. - // The persisted state on master-ineligible data nodes is mostly ignored - it's only there to support dangling index - // imports, which is inherently unsafe anyway. Thus we can safely delay metadata writes on master-ineligible data nodes - // until applying the cluster state, which is what this does: - clusterService.addLowPriorityApplier(this::applyClusterState); - } - persistedState.set(new InMemoryPersistedState(getCurrentTerm(), getLastAcceptedState())); - } else { - persistedState.set(this); - } + // Master-eligible nodes must persist the cluster state when accepting it because they must reload the (complete, fresh) + // last-accepted cluster state when restarted.
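// Illustrative sketch, not the actual code: once the Zen1 test-only case is set aside, the
// branching above reduces to this rule (GatewayPersistedState is the private inner class
// introduced further down in this file).
static PersistedState choosePersistedState(Settings settings, Manifest manifest,
                                           ClusterState lastAcceptedState,
                                           IncrementalClusterStateWriter writer) {
    if (DiscoveryNode.isMasterNode(settings)) {
        // master-eligible: persist the state as it is accepted so a restart can recover it
        return new GatewayPersistedState(writer);
    }
    // master-ineligible: hold the state in memory only; data nodes additionally register a
    // GatewayClusterApplier so index metadata is written lazily on the cluster applier thread
    return new InMemoryPersistedState(manifest.getCurrentTerm(), lastAcceptedState);
}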
+ persistedState.set(new GatewayPersistedState(incrementalClusterStateWriter)); } } - private void initializeClusterState(ClusterName clusterName) throws IOException { - long startNS = System.nanoTime(); - Tuple manifestAndMetaData = metaStateService.loadFullState(); - previousManifest = manifestAndMetaData.v1(); - - final MetaData metaData = manifestAndMetaData.v2(); - - previousClusterState = ClusterState.builder(clusterName) - .version(previousManifest.getClusterStateVersion()) - .metaData(metaData).build(); - - logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS))); - } - - protected void applyClusterStateUpdaters(TransportService transportService, ClusterService clusterService) { - assert previousClusterState.nodes().getLocalNode() == null : "applyClusterStateUpdaters must only be called once"; + // exposed so it can be overridden by tests + ClusterState prepareInitialClusterState(TransportService transportService, ClusterService clusterService, ClusterState clusterState) { + assert clusterState.nodes().getLocalNode() == null : "prepareInitialClusterState must only be called once"; assert transportService.getLocalNode() != null : "transport service is not yet started"; - - previousClusterState = Function.identity() + return Function.identity() .andThen(ClusterStateUpdaters::addStateNotRecoveredBlock) .andThen(state -> ClusterStateUpdaters.setLocalNode(state, transportService.getLocalNode())) .andThen(state -> ClusterStateUpdaters.upgradeAndArchiveUnknownOrInvalidSettings(state, clusterService.getClusterSettings())) .andThen(ClusterStateUpdaters::recoverClusterBlocks) - .apply(previousClusterState); + .apply(clusterState); } - protected void upgradeMetaData(MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) - throws IOException { - if (isMasterOrDataNode()) { + // exposed so it can be overridden by tests + void upgradeMetaData(Settings settings, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) throws IOException { + if (isMasterOrDataNode(settings)) { try { final Tuple metaStateAndData = metaStateService.loadFullState(); final Manifest manifest = metaStateAndData.v1(); @@ -179,7 +158,8 @@ public class GatewayMetaState implements PersistedState { // if there is manifest file, it means metadata is properly persisted to all data paths // if there is no manifest file (upgrade from 6.x to 7.x) metadata might be missing on some data paths, // but anyway we will re-write it as soon as we receive first ClusterState - final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, manifest); + final IncrementalClusterStateWriter.AtomicClusterStateWriter writer + = new IncrementalClusterStateWriter.AtomicClusterStateWriter(metaStateService, manifest); final MetaData upgradedMetaData = upgradeMetaData(metaData, metaDataIndexUpgradeService, metaDataUpgrader); final long globalStateGeneration; @@ -207,233 +187,25 @@ public class GatewayMetaState implements PersistedState { } } - private boolean isMasterOrDataNode() { + private static Tuple loadStateAndManifest(ClusterName clusterName, + MetaStateService metaStateService) throws IOException { + final long startNS = System.nanoTime(); + final Tuple manifestAndMetaData = metaStateService.loadFullState(); + final Manifest manifest = manifestAndMetaData.v1(); + + final ClusterState clusterState = ClusterState.builder(clusterName) + 
.version(manifest.getClusterStateVersion()) + .metaData(manifestAndMetaData.v2()).build(); + + logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS))); + + return Tuple.tuple(manifest, clusterState); + } + + private static boolean isMasterOrDataNode(Settings settings) { return DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings); } - public PersistedState getPersistedState() { - final PersistedState persistedState = this.persistedState.get(); - assert persistedState != null : "not started"; - return persistedState; - } - - public MetaData getMetaData() { - return previousClusterState.metaData(); - } - - private void applyClusterState(ClusterChangedEvent event) { - assert isMasterOrDataNode(); - - if (event.state().blocks().disableStatePersistence()) { - incrementalWrite = false; - return; - } - - try { - // Hack: This is to ensure that non-master-eligible Zen2 nodes always store a current term - // that's higher than the last accepted term. - // TODO: can we get rid of this hack? - if (event.state().term() > getCurrentTerm()) { - innerSetCurrentTerm(event.state().term()); - } - - updateClusterState(event.state(), event.previousState()); - incrementalWrite = true; - } catch (WriteStateException e) { - logger.warn("Exception occurred when storing new meta data", e); - } - } - - @Override - public long getCurrentTerm() { - return previousManifest.getCurrentTerm(); - } - - @Override - public ClusterState getLastAcceptedState() { - assert previousClusterState.nodes().getLocalNode() != null : "Cluster state is not fully built yet"; - return previousClusterState; - } - - @Override - public void setCurrentTerm(long currentTerm) { - try { - innerSetCurrentTerm(currentTerm); - } catch (WriteStateException e) { - logger.error(new ParameterizedMessage("Failed to set current term to {}", currentTerm), e); - e.rethrowAsErrorOrUncheckedException(); - } - } - - private void innerSetCurrentTerm(long currentTerm) throws WriteStateException { - Manifest manifest = new Manifest(currentTerm, previousManifest.getClusterStateVersion(), previousManifest.getGlobalGeneration(), - new HashMap<>(previousManifest.getIndexGenerations())); - metaStateService.writeManifestAndCleanup("current term changed", manifest); - previousManifest = manifest; - } - - @Override - public void setLastAcceptedState(ClusterState clusterState) { - try { - incrementalWrite = previousClusterState.term() == clusterState.term(); - updateClusterState(clusterState, previousClusterState); - } catch (WriteStateException e) { - logger.error(new ParameterizedMessage("Failed to set last accepted state with version {}", clusterState.version()), e); - e.rethrowAsErrorOrUncheckedException(); - } - } - - /** - * This class is used to write changed global {@link MetaData}, {@link IndexMetaData} and {@link Manifest} to disk. - * This class delegates write* calls to corresponding write calls in {@link MetaStateService} and - * additionally it keeps track of cleanup actions to be performed if transaction succeeds or fails. 
- */ - static class AtomicClusterStateWriter { - private static final String FINISHED_MSG = "AtomicClusterStateWriter is finished"; - private final List commitCleanupActions; - private final List rollbackCleanupActions; - private final Manifest previousManifest; - private final MetaStateService metaStateService; - private boolean finished; - - AtomicClusterStateWriter(MetaStateService metaStateService, Manifest previousManifest) { - this.metaStateService = metaStateService; - assert previousManifest != null; - this.previousManifest = previousManifest; - this.commitCleanupActions = new ArrayList<>(); - this.rollbackCleanupActions = new ArrayList<>(); - this.finished = false; - } - - long writeGlobalState(String reason, MetaData metaData) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - rollbackCleanupActions.add(() -> metaStateService.cleanupGlobalState(previousManifest.getGlobalGeneration())); - long generation = metaStateService.writeGlobalState(reason, metaData); - commitCleanupActions.add(() -> metaStateService.cleanupGlobalState(generation)); - return generation; - } catch (WriteStateException e) { - rollback(); - throw e; - } - } - - long writeIndex(String reason, IndexMetaData metaData) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - Index index = metaData.getIndex(); - Long previousGeneration = previousManifest.getIndexGenerations().get(index); - if (previousGeneration != null) { - // we prefer not to clean-up index metadata in case of rollback, - // if it's not referenced by previous manifest file - // not to break dangling indices functionality - rollbackCleanupActions.add(() -> metaStateService.cleanupIndex(index, previousGeneration)); - } - long generation = metaStateService.writeIndex(reason, metaData); - commitCleanupActions.add(() -> metaStateService.cleanupIndex(index, generation)); - return generation; - } catch (WriteStateException e) { - rollback(); - throw e; - } - } - - void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - metaStateService.writeManifestAndCleanup(reason, manifest); - commitCleanupActions.forEach(Runnable::run); - finished = true; - } catch (WriteStateException e) { - // if Manifest write results in dirty WriteStateException it's not safe to remove - // new metadata files, because if Manifest was actually written to disk and its deletion - // fails it will reference these new metadata files. - // In the future, we might decide to add more fine grained check to understand if after - // WriteStateException Manifest deletion has actually failed. - if (e.isDirty() == false) { - rollback(); - } - throw e; - } - } - - void rollback() { - rollbackCleanupActions.forEach(Runnable::run); - finished = true; - } - } - - /** - * Updates manifest and meta data on disk. - * - * @param newState new {@link ClusterState} - * @param previousState previous {@link ClusterState} - * - * @throws WriteStateException if exception occurs. See also {@link WriteStateException#isDirty()}. 
- */ - private void updateClusterState(ClusterState newState, ClusterState previousState) - throws WriteStateException { - MetaData newMetaData = newState.metaData(); - - final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, previousManifest); - long globalStateGeneration = writeGlobalState(writer, newMetaData); - Map indexGenerations = writeIndicesMetadata(writer, newState, previousState); - Manifest manifest = new Manifest(previousManifest.getCurrentTerm(), newState.version(), globalStateGeneration, indexGenerations); - writeManifest(writer, manifest); - - previousManifest = manifest; - previousClusterState = newState; - } - - private void writeManifest(AtomicClusterStateWriter writer, Manifest manifest) throws WriteStateException { - if (manifest.equals(previousManifest) == false) { - writer.writeManifestAndCleanup("changed", manifest); - } - } - - private Map writeIndicesMetadata(AtomicClusterStateWriter writer, ClusterState newState, ClusterState previousState) - throws WriteStateException { - Map previouslyWrittenIndices = previousManifest.getIndexGenerations(); - Set relevantIndices = getRelevantIndices(newState, previousState, previouslyWrittenIndices.keySet()); - - Map newIndices = new HashMap<>(); - - MetaData previousMetaData = incrementalWrite ? previousState.metaData() : null; - Iterable actions = resolveIndexMetaDataActions(previouslyWrittenIndices, relevantIndices, previousMetaData, - newState.metaData()); - - for (IndexMetaDataAction action : actions) { - long generation = action.execute(writer); - newIndices.put(action.getIndex(), generation); - } - - return newIndices; - } - - private long writeGlobalState(AtomicClusterStateWriter writer, MetaData newMetaData) - throws WriteStateException { - if (incrementalWrite == false || MetaData.isGlobalStateEquals(previousClusterState.metaData(), newMetaData) == false) { - return writer.writeGlobalState("changed", newMetaData); - } - return previousManifest.getGlobalGeneration(); - } - - public static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { - Set relevantIndices; - if (isDataOnlyNode(state)) { - relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices); - } else if (state.nodes().getLocalNode().isMasterNode()) { - relevantIndices = getRelevantIndicesForMasterEligibleNode(state); - } else { - relevantIndices = Collections.emptySet(); - } - return relevantIndices; - } - - private static boolean isDataOnlyNode(ClusterState state) { - return state.nodes().getLocalNode().isMasterNode() == false && state.nodes().getLocalNode().isDataNode(); - } - /** * Elasticsearch 2.0 removed several deprecated features and as well as support for Lucene 3.x. This method calls * {@link MetaDataIndexUpgradeService} to makes sure that indices are compatible with the current version. The @@ -489,160 +261,81 @@ public class GatewayMetaState implements PersistedState { return false; } - /** - * Returns list of {@link IndexMetaDataAction} for each relevant index. - * For each relevant index there are 3 options: - *
<ol> - * <li> - * {@link KeepPreviousGeneration} - index metadata is already stored to disk and index metadata version is not changed, no - * action is required. - * </li> - * <li> - * {@link WriteNewIndexMetaData} - there is no index metadata on disk and index metadata for this index should be written. - * </li> - * <li> - * {@link WriteChangedIndexMetaData} - index metadata is already on disk, but index metadata version has changed. Updated - * index metadata should be written to disk. - * </li> - * </ol>
- * - * @param previouslyWrittenIndices A list of indices for which the state was already written before - * @param relevantIndices The list of indices for which state should potentially be written - * @param previousMetaData The last meta data we know of - * @param newMetaData The new metadata - * @return list of {@link IndexMetaDataAction} for each relevant index. - */ - public static List resolveIndexMetaDataActions(Map previouslyWrittenIndices, - Set relevantIndices, - MetaData previousMetaData, - MetaData newMetaData) { - List actions = new ArrayList<>(); - for (Index index : relevantIndices) { - IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index); - IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index); - if (previouslyWrittenIndices.containsKey(index) == false || previousIndexMetaData == null) { - actions.add(new WriteNewIndexMetaData(newIndexMetaData)); - } else if (previousIndexMetaData.getVersion() != newIndexMetaData.getVersion()) { - actions.add(new WriteChangedIndexMetaData(previousIndexMetaData, newIndexMetaData)); - } else { - actions.add(new KeepPreviousGeneration(index, previouslyWrittenIndices.get(index))); + private static class GatewayClusterApplier implements ClusterStateApplier { + + private final IncrementalClusterStateWriter incrementalClusterStateWriter; + + private GatewayClusterApplier(IncrementalClusterStateWriter incrementalClusterStateWriter) { + this.incrementalClusterStateWriter = incrementalClusterStateWriter; + } + + @Override + public void applyClusterState(ClusterChangedEvent event) { + if (event.state().blocks().disableStatePersistence()) { + incrementalClusterStateWriter.setIncrementalWrite(false); + return; + } + + try { + // Hack: This is to ensure that non-master-eligible Zen2 nodes always store a current term + // that's higher than the last accepted term. + // TODO: can we get rid of this hack? 
+ if (event.state().term() > incrementalClusterStateWriter.getPreviousManifest().getCurrentTerm()) { + incrementalClusterStateWriter.setCurrentTerm(event.state().term()); + } + + incrementalClusterStateWriter.updateClusterState(event.state(), event.previousState()); + incrementalClusterStateWriter.setIncrementalWrite(true); + } catch (WriteStateException e) { + logger.warn("Exception occurred when storing new meta data", e); } } - return actions; + } - private static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set - previouslyWrittenIndices) { - RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); - if (newRoutingNode == null) { - throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state"); + private static class GatewayPersistedState implements PersistedState { + + private final IncrementalClusterStateWriter incrementalClusterStateWriter; + + GatewayPersistedState(IncrementalClusterStateWriter incrementalClusterStateWriter) { + this.incrementalClusterStateWriter = incrementalClusterStateWriter; } - Set indices = new HashSet<>(); - for (ShardRouting routing : newRoutingNode) { - indices.add(routing.index()); + + @Override + public long getCurrentTerm() { + return incrementalClusterStateWriter.getPreviousManifest().getCurrentTerm(); } - // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if - // we have it written on disk previously - for (IndexMetaData indexMetaData : state.metaData()) { - boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE); - // if the index is open we might still have to write the state if it just transitioned from closed to open - // so we have to check for that as well. - IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex()); - if (previousMetaData != null) { - isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); - } - if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { - indices.add(indexMetaData.getIndex()); + + @Override + public ClusterState getLastAcceptedState() { + final ClusterState previousClusterState = incrementalClusterStateWriter.getPreviousClusterState(); + assert previousClusterState.nodes().getLocalNode() != null : "Cluster state is not fully built yet"; + return previousClusterState; + } + + @Override + public void setCurrentTerm(long currentTerm) { + try { + incrementalClusterStateWriter.setCurrentTerm(currentTerm); + } catch (WriteStateException e) { + logger.error(new ParameterizedMessage("Failed to set current term to {}", currentTerm), e); + e.rethrowAsErrorOrUncheckedException(); } } - return indices; - } - - private static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { - Set relevantIndices = new HashSet<>(); - // we have to iterate over the metadata to make sure we also capture closed indices - for (IndexMetaData indexMetaData : state.metaData()) { - relevantIndices.add(indexMetaData.getIndex()); - } - return relevantIndices; - } - - /** - * Action to perform with index metadata. - */ - public interface IndexMetaDataAction { - /** - * @return index for index metadata. - */ - Index getIndex(); - - /** - * Executes this action using provided {@link AtomicClusterStateWriter}. - * - * @return new index metadata state generation, to be used in manifest file. 
- * @throws WriteStateException if exception occurs. - */ - long execute(AtomicClusterStateWriter writer) throws WriteStateException; - } - - public static class KeepPreviousGeneration implements IndexMetaDataAction { - private final Index index; - private final long generation; - - KeepPreviousGeneration(Index index, long generation) { - this.index = index; - this.generation = generation; - } @Override - public Index getIndex() { - return index; + public void setLastAcceptedState(ClusterState clusterState) { + try { + final ClusterState previousClusterState = incrementalClusterStateWriter.getPreviousClusterState(); + incrementalClusterStateWriter.setIncrementalWrite(previousClusterState.term() == clusterState.term()); + incrementalClusterStateWriter.updateClusterState(clusterState, previousClusterState); + } catch (WriteStateException e) { + logger.error(new ParameterizedMessage("Failed to set last accepted state with version {}", clusterState.version()), e); + e.rethrowAsErrorOrUncheckedException(); + } } - @Override - public long execute(AtomicClusterStateWriter writer) { - return generation; - } } - public static class WriteNewIndexMetaData implements IndexMetaDataAction { - private final IndexMetaData indexMetaData; - - WriteNewIndexMetaData(IndexMetaData indexMetaData) { - this.indexMetaData = indexMetaData; - } - - @Override - public Index getIndex() { - return indexMetaData.getIndex(); - } - - @Override - public long execute(AtomicClusterStateWriter writer) throws WriteStateException { - return writer.writeIndex("freshly created", indexMetaData); - } - } - - public static class WriteChangedIndexMetaData implements IndexMetaDataAction { - private final IndexMetaData newIndexMetaData; - private final IndexMetaData oldIndexMetaData; - - WriteChangedIndexMetaData(IndexMetaData oldIndexMetaData, IndexMetaData newIndexMetaData) { - this.oldIndexMetaData = oldIndexMetaData; - this.newIndexMetaData = newIndexMetaData; - } - - @Override - public Index getIndex() { - return newIndexMetaData.getIndex(); - } - - @Override - public long execute(AtomicClusterStateWriter writer) throws WriteStateException { - return writer.writeIndex( - "version changed from [" + oldIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]", - newIndexMetaData); - } - } } diff --git a/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java new file mode 100644 index 00000000000..5facb826a24 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java @@ -0,0 +1,384 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gateway; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.Manifest; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.index.Index; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Tracks the metadata written to disk, allowing updated metadata to be written incrementally (i.e. only writing out the changed metadata). + */ +class IncrementalClusterStateWriter { + + private final MetaStateService metaStateService; + + // On master-eligible nodes we call updateClusterState under the Coordinator's mutex; on master-ineligible data nodes we call + // updateClusterState on the (unique) cluster applier thread; on other nodes we never call updateClusterState. In all cases there's + // no need to synchronize access to these fields. + private Manifest previousManifest; + private ClusterState previousClusterState; + private boolean incrementalWrite; + + IncrementalClusterStateWriter(MetaStateService metaStateService, Manifest manifest, ClusterState clusterState) { + this.metaStateService = metaStateService; + this.previousManifest = manifest; + this.previousClusterState = clusterState; + this.incrementalWrite = false; + } + + void setCurrentTerm(long currentTerm) throws WriteStateException { + Manifest manifest = new Manifest(currentTerm, previousManifest.getClusterStateVersion(), previousManifest.getGlobalGeneration(), + new HashMap<>(previousManifest.getIndexGenerations())); + metaStateService.writeManifestAndCleanup("current term changed", manifest); + previousManifest = manifest; + } + + Manifest getPreviousManifest() { + return previousManifest; + } + + ClusterState getPreviousClusterState() { + return previousClusterState; + } + + void setIncrementalWrite(boolean incrementalWrite) { + this.incrementalWrite = incrementalWrite; + } + + /** + * Updates manifest and meta data on disk. + * + * @param newState new {@link ClusterState} + * @param previousState previous {@link ClusterState} + * + * @throws WriteStateException if exception occurs. See also {@link WriteStateException#isDirty()}. 
+ */ + void updateClusterState(ClusterState newState, ClusterState previousState) throws WriteStateException { + MetaData newMetaData = newState.metaData(); + + final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, previousManifest); + long globalStateGeneration = writeGlobalState(writer, newMetaData); + Map indexGenerations = writeIndicesMetadata(writer, newState, previousState); + Manifest manifest = new Manifest(previousManifest.getCurrentTerm(), newState.version(), globalStateGeneration, indexGenerations); + writeManifest(writer, manifest); + + previousManifest = manifest; + previousClusterState = newState; + } + + private void writeManifest(AtomicClusterStateWriter writer, Manifest manifest) throws WriteStateException { + if (manifest.equals(previousManifest) == false) { + writer.writeManifestAndCleanup("changed", manifest); + } + } + + private Map writeIndicesMetadata(AtomicClusterStateWriter writer, ClusterState newState, ClusterState previousState) + throws WriteStateException { + Map previouslyWrittenIndices = previousManifest.getIndexGenerations(); + Set relevantIndices = getRelevantIndices(newState, previousState, previouslyWrittenIndices.keySet()); + + Map newIndices = new HashMap<>(); + + MetaData previousMetaData = incrementalWrite ? previousState.metaData() : null; + Iterable actions = resolveIndexMetaDataActions(previouslyWrittenIndices, relevantIndices, previousMetaData, + newState.metaData()); + + for (IndexMetaDataAction action : actions) { + long generation = action.execute(writer); + newIndices.put(action.getIndex(), generation); + } + + return newIndices; + } + + private long writeGlobalState(AtomicClusterStateWriter writer, MetaData newMetaData) throws WriteStateException { + if (incrementalWrite == false || MetaData.isGlobalStateEquals(previousClusterState.metaData(), newMetaData) == false) { + return writer.writeGlobalState("changed", newMetaData); + } + return previousManifest.getGlobalGeneration(); + } + + + /** + * Returns list of {@link IndexMetaDataAction} for each relevant index. + * For each relevant index there are 3 options: + *
<ol> + * <li> + * {@link KeepPreviousGeneration} - index metadata is already stored to disk and index metadata version is not changed, no + * action is required. + * </li> + * <li> + * {@link WriteNewIndexMetaData} - there is no index metadata on disk and index metadata for this index should be written. + * </li> + * <li> + * {@link WriteChangedIndexMetaData} - index metadata is already on disk, but index metadata version has changed. Updated + * index metadata should be written to disk. + * </li> + * </ol>
+ * + * @param previouslyWrittenIndices A list of indices for which the state was already written before + * @param relevantIndices The list of indices for which state should potentially be written + * @param previousMetaData The last meta data we know of + * @param newMetaData The new metadata + * @return list of {@link IndexMetaDataAction} for each relevant index. + */ + // exposed for tests + static List resolveIndexMetaDataActions(Map previouslyWrittenIndices, + Set relevantIndices, + MetaData previousMetaData, + MetaData newMetaData) { + List actions = new ArrayList<>(); + for (Index index : relevantIndices) { + IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index); + IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index); + + if (previouslyWrittenIndices.containsKey(index) == false || previousIndexMetaData == null) { + actions.add(new WriteNewIndexMetaData(newIndexMetaData)); + } else if (previousIndexMetaData.getVersion() != newIndexMetaData.getVersion()) { + actions.add(new WriteChangedIndexMetaData(previousIndexMetaData, newIndexMetaData)); + } else { + actions.add(new KeepPreviousGeneration(index, previouslyWrittenIndices.get(index))); + } + } + return actions; + } + + private static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set + previouslyWrittenIndices) { + RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); + if (newRoutingNode == null) { + throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state"); + } + Set indices = new HashSet<>(); + for (ShardRouting routing : newRoutingNode) { + indices.add(routing.index()); + } + // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if + // we have it written on disk previously + for (IndexMetaData indexMetaData : state.metaData()) { + boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE); + // if the index is open we might still have to write the state if it just transitioned from closed to open + // so we have to check for that as well. 
+ IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex()); + if (previousMetaData != null) { + isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); + } + if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { + indices.add(indexMetaData.getIndex()); + } + } + return indices; + } + + private static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { + Set relevantIndices = new HashSet<>(); + // we have to iterate over the metadata to make sure we also capture closed indices + for (IndexMetaData indexMetaData : state.metaData()) { + relevantIndices.add(indexMetaData.getIndex()); + } + return relevantIndices; + } + + // exposed for tests + static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { + Set relevantIndices; + if (isDataOnlyNode(state)) { + relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices); + } else if (state.nodes().getLocalNode().isMasterNode()) { + relevantIndices = getRelevantIndicesForMasterEligibleNode(state); + } else { + relevantIndices = Collections.emptySet(); + } + return relevantIndices; + } + + private static boolean isDataOnlyNode(ClusterState state) { + return state.nodes().getLocalNode().isMasterNode() == false && state.nodes().getLocalNode().isDataNode(); + } + + /** + * Action to perform with index metadata. + */ + interface IndexMetaDataAction { + /** + * @return index for index metadata. + */ + Index getIndex(); + + /** + * Executes this action using provided {@link AtomicClusterStateWriter}. + * + * @return new index metadata state generation, to be used in manifest file. + * @throws WriteStateException if exception occurs. + */ + long execute(AtomicClusterStateWriter writer) throws WriteStateException; + } + + /** + * This class is used to write changed global {@link MetaData}, {@link IndexMetaData} and {@link Manifest} to disk. + * This class delegates write* calls to corresponding write calls in {@link MetaStateService} and + * additionally it keeps track of cleanup actions to be performed if transaction succeeds or fails. 
+ */ + static class AtomicClusterStateWriter { + private static final String FINISHED_MSG = "AtomicClusterStateWriter is finished"; + private final List commitCleanupActions; + private final List rollbackCleanupActions; + private final Manifest previousManifest; + private final MetaStateService metaStateService; + private boolean finished; + + AtomicClusterStateWriter(MetaStateService metaStateService, Manifest previousManifest) { + this.metaStateService = metaStateService; + assert previousManifest != null; + this.previousManifest = previousManifest; + this.commitCleanupActions = new ArrayList<>(); + this.rollbackCleanupActions = new ArrayList<>(); + this.finished = false; + } + + long writeGlobalState(String reason, MetaData metaData) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + rollbackCleanupActions.add(() -> metaStateService.cleanupGlobalState(previousManifest.getGlobalGeneration())); + long generation = metaStateService.writeGlobalState(reason, metaData); + commitCleanupActions.add(() -> metaStateService.cleanupGlobalState(generation)); + return generation; + } catch (WriteStateException e) { + rollback(); + throw e; + } + } + + long writeIndex(String reason, IndexMetaData metaData) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + Index index = metaData.getIndex(); + Long previousGeneration = previousManifest.getIndexGenerations().get(index); + if (previousGeneration != null) { + // we prefer not to clean-up index metadata in case of rollback, + // if it's not referenced by previous manifest file + // not to break dangling indices functionality + rollbackCleanupActions.add(() -> metaStateService.cleanupIndex(index, previousGeneration)); + } + long generation = metaStateService.writeIndex(reason, metaData); + commitCleanupActions.add(() -> metaStateService.cleanupIndex(index, generation)); + return generation; + } catch (WriteStateException e) { + rollback(); + throw e; + } + } + + void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + metaStateService.writeManifestAndCleanup(reason, manifest); + commitCleanupActions.forEach(Runnable::run); + finished = true; + } catch (WriteStateException e) { + // If the Manifest write results in a dirty WriteStateException it's not safe to roll back, removing the new metadata files, + // because if the Manifest was actually written to disk and its deletion fails it will reference these new metadata files. + // On master-eligible nodes a dirty WriteStateException here is fatal to the node since we no longer really have any idea + // what the state on disk is and the only sensible response is to start again from scratch. 
+ if (e.isDirty() == false) { + rollback(); + } + throw e; + } + } + + void rollback() { + rollbackCleanupActions.forEach(Runnable::run); + finished = true; + } + } + + static class KeepPreviousGeneration implements IndexMetaDataAction { + private final Index index; + private final long generation; + + KeepPreviousGeneration(Index index, long generation) { + this.index = index; + this.generation = generation; + } + + @Override + public Index getIndex() { + return index; + } + + @Override + public long execute(AtomicClusterStateWriter writer) { + return generation; + } + } + + static class WriteNewIndexMetaData implements IndexMetaDataAction { + private final IndexMetaData indexMetaData; + + WriteNewIndexMetaData(IndexMetaData indexMetaData) { + this.indexMetaData = indexMetaData; + } + + @Override + public Index getIndex() { + return indexMetaData.getIndex(); + } + + @Override + public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + return writer.writeIndex("freshly created", indexMetaData); + } + } + + static class WriteChangedIndexMetaData implements IndexMetaDataAction { + private final IndexMetaData newIndexMetaData; + private final IndexMetaData oldIndexMetaData; + + WriteChangedIndexMetaData(IndexMetaData oldIndexMetaData, IndexMetaData newIndexMetaData) { + this.oldIndexMetaData = oldIndexMetaData; + this.newIndexMetaData = newIndexMetaData; + } + + @Override + public Index getIndex() { + return newIndexMetaData.getIndex(); + } + + @Override + public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + return writer.writeIndex( + "version changed from [" + oldIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]", + newIndexMetaData); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index efa7ddcd657..feb35a91283 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -482,7 +482,7 @@ public class Node implements Closeable { ).collect(Collectors.toSet()); final TransportService transportService = newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); - final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, metaStateService); + final GatewayMetaState gatewayMetaState = new GatewayMetaState(); final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); final SearchTransportService searchTransportService = new SearchTransportService(transportService, SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); @@ -700,7 +700,7 @@ public class Node implements Closeable { // Load (and maybe upgrade) the metadata stored on disk final GatewayMetaState gatewayMetaState = injector.getInstance(GatewayMetaState.class); - gatewayMetaState.start(transportService, clusterService, + gatewayMetaState.start(settings(), transportService, clusterService, injector.getInstance(MetaStateService.class), injector.getInstance(MetaDataIndexUpgradeService.class), injector.getInstance(MetaDataUpgrader.class)); // we load the global state here (the persistent part of the cluster state stored on disk) to // pass it to the bootstrap checks to allow plugins to enforce certain preconditions based on the recovered state. 
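The interplay between `GatewayPersistedState` and `IncrementalClusterStateWriter` above boils down to a short write path on master-eligible nodes. A sketch mirroring the calls made by `setLastAcceptedState` (names are placeholders):

    void acceptState(IncrementalClusterStateWriter writer, ClusterState newState) throws WriteStateException {
        ClusterState previous = writer.getPreviousClusterState();
        // a term change forces a full rewrite; within a term only changed metadata is rewritten
        writer.setIncrementalWrite(previous.term() == newState.term());
        writer.updateClusterState(newState, previous);
    }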
diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java index 107cc7541fe..e723d08d735 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -24,6 +24,8 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.CoordinationMetaData; import org.elasticsearch.cluster.coordination.CoordinationMetaData.VotingConfigExclusion; +import org.elasticsearch.cluster.coordination.CoordinationState; +import org.elasticsearch.cluster.coordination.InMemoryPersistedState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; @@ -35,10 +37,10 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Collections; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; public class GatewayMetaStatePersistedStateTests extends ESTestCase { @@ -63,21 +65,23 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { super.tearDown(); } - private MockGatewayMetaState newGateway() { - final MockGatewayMetaState gateway = new MockGatewayMetaState(settings, nodeEnvironment, xContentRegistry(), localNode); - gateway.start(); - return gateway; + private CoordinationState.PersistedState newGatewayPersistedState() { + final MockGatewayMetaState gateway = new MockGatewayMetaState(localNode); + gateway.start(settings, nodeEnvironment, xContentRegistry()); + final CoordinationState.PersistedState persistedState = gateway.getPersistedState(); + assertThat(persistedState, not(instanceOf(InMemoryPersistedState.class))); + return persistedState; } - private MockGatewayMetaState maybeNew(MockGatewayMetaState gateway) throws IOException { + private CoordinationState.PersistedState maybeNew(CoordinationState.PersistedState persistedState) { if (randomBoolean()) { - return newGateway(); + return newGatewayPersistedState(); } - return gateway; + return persistedState; } - public void testInitialState() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void testInitialState() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); ClusterState state = gateway.getLastAcceptedState(); assertThat(state.getClusterName(), equalTo(clusterName)); assertTrue(MetaData.isGlobalStateEquals(state.metaData(), MetaData.EMPTY_META_DATA)); @@ -88,8 +92,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { assertThat(currentTerm, equalTo(Manifest.empty().getCurrentTerm())); } - public void testSetCurrentTerm() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void testSetCurrentTerm() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); for (int i = 0; i < randomIntBetween(1, 5); i++) { final long currentTerm = randomNonNegativeLong(); @@ -142,8 +146,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { } } - public void testSetLastAcceptedState() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void 
testSetLastAcceptedState() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); final long term = randomNonNegativeLong(); for (int i = 0; i < randomIntBetween(1, 5); i++) { @@ -165,8 +169,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { } } - public void testSetLastAcceptedStateTermChanged() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void testSetLastAcceptedStateTermChanged() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); final String indexName = randomAlphaOfLength(10); final int numberOfShards = randomIntBetween(1, 5); @@ -178,7 +182,7 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { gateway.setLastAcceptedState(state); gateway = maybeNew(gateway); - final long newTerm = randomValueOtherThan(term, () -> randomNonNegativeLong()); + final long newTerm = randomValueOtherThan(term, ESTestCase::randomNonNegativeLong); final int newNumberOfShards = randomValueOtherThan(numberOfShards, () -> randomIntBetween(1,5)); final IndexMetaData newIndexMetaData = createIndexMetaData(indexName, newNumberOfShards, version); final ClusterState newClusterState = createClusterState(randomNonNegativeLong(), @@ -189,11 +193,11 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { assertThat(gateway.getLastAcceptedState().metaData().index(indexName), equalTo(newIndexMetaData)); } - public void testCurrentTermAndTermAreDifferent() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void testCurrentTermAndTermAreDifferent() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); long currentTerm = randomNonNegativeLong(); - long term = randomValueOtherThan(currentTerm, () -> randomNonNegativeLong()); + long term = randomValueOtherThan(currentTerm, ESTestCase::randomNonNegativeLong); gateway.setCurrentTerm(currentTerm); gateway.setLastAcceptedState(createClusterState(randomNonNegativeLong(), @@ -204,8 +208,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase { assertThat(gateway.getLastAcceptedState().coordinationMetaData().term(), equalTo(term)); } - public void testMarkAcceptedConfigAsCommitted() throws IOException { - MockGatewayMetaState gateway = newGateway(); + public void testMarkAcceptedConfigAsCommitted() { + CoordinationState.PersistedState gateway = newGatewayPersistedState(); //generate random coordinationMetaData with different lastAcceptedConfiguration and lastCommittedConfiguration CoordinationMetaData coordinationMetaData; diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index c8f274c2f18..d0101f276d8 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -19,417 +19,24 @@ package org.elasticsearch.gateway; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; -import 
org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.Index; import org.elasticsearch.plugins.MetaDataUpgrader; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestCustomMetaData; -import org.mockito.ArgumentCaptor; -import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; -public class GatewayMetaStateTests extends ESAllocationTestCase { - - private ClusterState noIndexClusterState(boolean masterEligible) { - MetaData metaData = MetaData.builder().build(); - RoutingTable routingTable = RoutingTable.builder().build(); - - return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .metaData(metaData) - .routingTable(routingTable) - .nodes(generateDiscoveryNodes(masterEligible)) - .build(); - } - - private ClusterState clusterStateWithUnassignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - MetaData metaData = MetaData.builder() - .put(indexMetaData, false) - .build(); - - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) - .build(); - - return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .metaData(metaData) - .routingTable(routingTable) - .nodes(generateDiscoveryNodes(masterEligible)) - .build(); - } - - private ClusterState clusterStateWithAssignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - AllocationService strategy = createAllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 100) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) - .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) - .build()); - - ClusterState oldClusterState = clusterStateWithUnassignedIndex(indexMetaData, masterEligible); - RoutingTable routingTable = strategy.reroute(oldClusterState, "reroute").routingTable(); - - MetaData metaDataNewClusterState = MetaData.builder() - .put(oldClusterState.metaData().index("test"), false) - .build(); - - return ClusterState.builder(oldClusterState).routingTable(routingTable) - 
.metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private ClusterState clusterStateWithClosedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - ClusterState oldClusterState = clusterStateWithAssignedIndex(indexMetaData, masterEligible); - - MetaData metaDataNewClusterState = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.CLOSE) - .numberOfShards(5).numberOfReplicas(2)) - .version(oldClusterState.metaData().version() + 1) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaDataNewClusterState.index("test")) - .build(); - - return ClusterState.builder(oldClusterState).routingTable(routingTable) - .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private ClusterState clusterStateWithJustOpenedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - ClusterState oldClusterState = clusterStateWithClosedIndex(indexMetaData, masterEligible); - - MetaData metaDataNewClusterState = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.OPEN) - .numberOfShards(5).numberOfReplicas(2)) - .version(oldClusterState.metaData().version() + 1) - .build(); - - return ClusterState.builder(oldClusterState) - .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private DiscoveryNodes.Builder generateDiscoveryNodes(boolean masterEligible) { - Set dataOnlyRoles = Collections.singleton(DiscoveryNodeRole.DATA_ROLE); - return DiscoveryNodes.builder().add(newNode("node1", masterEligible ? MASTER_DATA_ROLES : dataOnlyRoles)) - .add(newNode("master_node", MASTER_DATA_ROLES)).localNodeId("node1").masterNodeId(masterEligible ? "node1" : "master_node"); - } - - private Set randomPrevWrittenIndices(IndexMetaData indexMetaData) { - if (randomBoolean()) { - return Collections.singleton(indexMetaData.getIndex()); - } else { - return Collections.emptySet(); - } - } - - private IndexMetaData createIndexMetaData(String name) { - return IndexMetaData.builder(name). - settings(settings(Version.CURRENT)). - numberOfShards(5). - numberOfReplicas(2). 
- build(); - } - - public void testGetRelevantIndicesWithUnassignedShardsOnMasterEligibleNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithUnassignedIndex(indexMetaData, true), - noIndexClusterState(true), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesWithUnassignedShardsOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithUnassignedIndex(indexMetaData, false), - noIndexClusterState(false), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(0)); - } - - public void testGetRelevantIndicesWithAssignedShards() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - boolean masterEligible = randomBoolean(); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithAssignedIndex(indexMetaData, masterEligible), - clusterStateWithUnassignedIndex(indexMetaData, masterEligible), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesForClosedPrevWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithClosedIndex(indexMetaData, false), - clusterStateWithAssignedIndex(indexMetaData, false), - Collections.singleton(indexMetaData.getIndex())); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesForClosedPrevNotWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithJustOpenedIndex(indexMetaData, false), - clusterStateWithClosedIndex(indexMetaData, false), - Collections.emptySet()); - assertThat(indices.size(), equalTo(0)); - } - - public void testGetRelevantIndicesForWasClosedPrevWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithJustOpenedIndex(indexMetaData, false), - clusterStateWithClosedIndex(indexMetaData, false), - Collections.singleton(indexMetaData.getIndex())); - assertThat(indices.size(), equalTo(1)); - } - - public void testResolveStatesToBeWritten() throws WriteStateException { - Map indices = new HashMap<>(); - Set relevantIndices = new HashSet<>(); - - IndexMetaData removedIndex = createIndexMetaData("removed_index"); - indices.put(removedIndex.getIndex(), 1L); - - IndexMetaData versionChangedIndex = createIndexMetaData("version_changed_index"); - indices.put(versionChangedIndex.getIndex(), 2L); - relevantIndices.add(versionChangedIndex.getIndex()); - - IndexMetaData notChangedIndex = createIndexMetaData("not_changed_index"); - indices.put(notChangedIndex.getIndex(), 3L); - relevantIndices.add(notChangedIndex.getIndex()); - - IndexMetaData newIndex = createIndexMetaData("new_index"); - relevantIndices.add(newIndex.getIndex()); - - MetaData oldMetaData = MetaData.builder() - .put(removedIndex, false) - .put(versionChangedIndex, false) - .put(notChangedIndex, false) - .build(); - - MetaData newMetaData = MetaData.builder() - .put(versionChangedIndex, true) - .put(notChangedIndex, false) - .put(newIndex, false) - .build(); - - IndexMetaData newVersionChangedIndex = newMetaData.index(versionChangedIndex.getIndex()); - - List actions = - 
GatewayMetaState.resolveIndexMetaDataActions(indices, relevantIndices, oldMetaData, newMetaData); - - assertThat(actions, hasSize(3)); - - for (GatewayMetaState.IndexMetaDataAction action : actions) { - if (action instanceof GatewayMetaState.KeepPreviousGeneration) { - assertThat(action.getIndex(), equalTo(notChangedIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - assertThat(action.execute(writer), equalTo(3L)); - verifyZeroInteractions(writer); - } - if (action instanceof GatewayMetaState.WriteNewIndexMetaData) { - assertThat(action.getIndex(), equalTo(newIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - when(writer.writeIndex("freshly created", newIndex)).thenReturn(0L); - assertThat(action.execute(writer), equalTo(0L)); - } - if (action instanceof GatewayMetaState.WriteChangedIndexMetaData) { - assertThat(action.getIndex(), equalTo(newVersionChangedIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - when(writer.writeIndex(anyString(), eq(newVersionChangedIndex))).thenReturn(3L); - assertThat(action.execute(writer), equalTo(3L)); - ArgumentCaptor reason = ArgumentCaptor.forClass(String.class); - verify(writer).writeIndex(reason.capture(), eq(newVersionChangedIndex)); - assertThat(reason.getValue(), containsString(Long.toString(versionChangedIndex.getVersion()))); - assertThat(reason.getValue(), containsString(Long.toString(newVersionChangedIndex.getVersion()))); - } - } - } - - private static class MetaStateServiceWithFailures extends MetaStateService { - private final int invertedFailRate; - private boolean failRandomly; - - private MetaDataStateFormat wrap(MetaDataStateFormat format) { - return new MetaDataStateFormat(format.getPrefix()) { - @Override - public void toXContent(XContentBuilder builder, T state) throws IOException { - format.toXContent(builder, state); - } - - @Override - public T fromXContent(XContentParser parser) throws IOException { - return format.fromXContent(parser); - } - - @Override - protected Directory newDirectory(Path dir) { - MockDirectoryWrapper mock = newMockFSDirectory(dir); - if (failRandomly) { - MockDirectoryWrapper.Failure fail = new MockDirectoryWrapper.Failure() { - @Override - public void eval(MockDirectoryWrapper dir) throws IOException { - int r = randomIntBetween(0, invertedFailRate); - if (r == 0) { - throw new MockDirectoryWrapper.FakeIOException(); - } - } - }; - mock.failOn(fail); - } - closeAfterSuite(mock); - return mock; - } - }; - } - - MetaStateServiceWithFailures(int invertedFailRate, NodeEnvironment nodeEnv, NamedXContentRegistry namedXContentRegistry) { - super(nodeEnv, namedXContentRegistry); - META_DATA_FORMAT = wrap(MetaData.FORMAT); - INDEX_META_DATA_FORMAT = wrap(IndexMetaData.FORMAT); - MANIFEST_FORMAT = wrap(Manifest.FORMAT); - failRandomly = false; - this.invertedFailRate = invertedFailRate; - } - - void failRandomly() { - failRandomly = true; - } - - void noFailures() { - failRandomly = false; - } - } - - private boolean metaDataEquals(MetaData md1, MetaData md2) { - boolean equals = MetaData.isGlobalStateEquals(md1, md2); - - for (IndexMetaData imd : md1) { - IndexMetaData imd2 = md2.index(imd.getIndex()); - equals = equals && imd.equals(imd2); - } - - for (IndexMetaData imd : md2) { - IndexMetaData imd2 = md1.index(imd.getIndex()); - equals = equals && imd.equals(imd2); - } - return 
equals; - } - - private static MetaData randomMetaDataForTx() { - int settingNo = randomIntBetween(0, 10); - MetaData.Builder builder = MetaData.builder() - .persistentSettings(Settings.builder().put("setting" + settingNo, randomAlphaOfLength(5)).build()); - int numOfIndices = randomIntBetween(0, 3); - - for (int i = 0; i < numOfIndices; i++) { - int indexNo = randomIntBetween(0, 50); - IndexMetaData indexMetaData = IndexMetaData.builder("index" + indexNo).settings( - Settings.builder() - .put(IndexMetaData.SETTING_INDEX_UUID, "index" + indexNo) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build() - ).build(); - builder.put(indexMetaData, false); - } - return builder.build(); - } - - public void testAtomicityWithFailures() throws IOException { - try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateServiceWithFailures metaStateService = - new MetaStateServiceWithFailures(randomIntBetween(100, 1000), env, xContentRegistry()); - - // We only guarantee atomicity of writes, if there is initial Manifest file - Manifest manifest = Manifest.empty(); - MetaData metaData = MetaData.EMPTY_META_DATA; - metaStateService.writeManifestAndCleanup("startup", Manifest.empty()); - long currentTerm = randomNonNegativeLong(); - long clusterStateVersion = randomNonNegativeLong(); - - metaStateService.failRandomly(); - Set possibleMetaData = new HashSet<>(); - possibleMetaData.add(metaData); - - for (int i = 0; i < randomIntBetween(1, 5); i++) { - GatewayMetaState.AtomicClusterStateWriter writer = - new GatewayMetaState.AtomicClusterStateWriter(metaStateService, manifest); - metaData = randomMetaDataForTx(); - Map indexGenerations = new HashMap<>(); - - try { - long globalGeneration = writer.writeGlobalState("global", metaData); - - for (IndexMetaData indexMetaData : metaData) { - long generation = writer.writeIndex("index", indexMetaData); - indexGenerations.put(indexMetaData.getIndex(), generation); - } - - Manifest newManifest = new Manifest(currentTerm, clusterStateVersion, globalGeneration, indexGenerations); - writer.writeManifestAndCleanup("manifest", newManifest); - possibleMetaData.clear(); - possibleMetaData.add(metaData); - manifest = newManifest; - } catch (WriteStateException e) { - if (e.isDirty()) { - possibleMetaData.add(metaData); - /* - * If dirty WriteStateException occurred, it's only safe to proceed if there is subsequent - * successful write of metadata and Manifest. We prefer to break here, not to over complicate test logic. - * See also MetaDataStateFormat#testFailRandomlyAndReadAnyState, that does not break. 
- */ - break; - } - } - } - - metaStateService.noFailures(); - - Tuple manifestAndMetaData = metaStateService.loadFullState(); - MetaData loadedMetaData = manifestAndMetaData.v2(); - - assertTrue(possibleMetaData.stream().anyMatch(md -> metaDataEquals(md, loadedMetaData))); - } - } +public class GatewayMetaStateTests extends ESTestCase { public void testAddCustomMetaDataOnUpgrade() throws Exception { MetaData metaData = randomMetaData(); diff --git a/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java new file mode 100644 index 00000000000..b41a24bb820 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java @@ -0,0 +1,429 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gateway; + +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.Manifest; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { + + private ClusterState 
noIndexClusterState(boolean masterEligible) { + MetaData metaData = MetaData.builder().build(); + RoutingTable routingTable = RoutingTable.builder().build(); + + return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable) + .nodes(generateDiscoveryNodes(masterEligible)) + .build(); + } + + private ClusterState clusterStateWithUnassignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + MetaData metaData = MetaData.builder() + .put(indexMetaData, false) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable) + .nodes(generateDiscoveryNodes(masterEligible)) + .build(); + } + + private ClusterState clusterStateWithAssignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + AllocationService strategy = createAllocationService(Settings.builder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) + .build()); + + ClusterState oldClusterState = clusterStateWithUnassignedIndex(indexMetaData, masterEligible); + RoutingTable routingTable = strategy.reroute(oldClusterState, "reroute").routingTable(); + + MetaData metaDataNewClusterState = MetaData.builder() + .put(oldClusterState.metaData().index("test"), false) + .build(); + + return ClusterState.builder(oldClusterState).routingTable(routingTable) + .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); + } + + private ClusterState clusterStateWithClosedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + ClusterState oldClusterState = clusterStateWithAssignedIndex(indexMetaData, masterEligible); + + MetaData metaDataNewClusterState = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.CLOSE) + .numberOfShards(5).numberOfReplicas(2)) + .version(oldClusterState.metaData().version() + 1) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaDataNewClusterState.index("test")) + .build(); + + return ClusterState.builder(oldClusterState).routingTable(routingTable) + .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); + } + + private ClusterState clusterStateWithJustOpenedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + ClusterState oldClusterState = clusterStateWithClosedIndex(indexMetaData, masterEligible); + + MetaData metaDataNewClusterState = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.OPEN) + .numberOfShards(5).numberOfReplicas(2)) + .version(oldClusterState.metaData().version() + 1) + .build(); + + return ClusterState.builder(oldClusterState) + .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); + } + + private DiscoveryNodes.Builder generateDiscoveryNodes(boolean masterEligible) { + Set dataOnlyRoles = Collections.singleton(DiscoveryNodeRole.DATA_ROLE); + return DiscoveryNodes.builder().add(newNode("node1", masterEligible ? 
MASTER_DATA_ROLES : dataOnlyRoles)) + .add(newNode("master_node", MASTER_DATA_ROLES)).localNodeId("node1").masterNodeId(masterEligible ? "node1" : "master_node"); + } + + private Set randomPrevWrittenIndices(IndexMetaData indexMetaData) { + if (randomBoolean()) { + return Collections.singleton(indexMetaData.getIndex()); + } else { + return Collections.emptySet(); + } + } + + private IndexMetaData createIndexMetaData(String name) { + return IndexMetaData.builder(name). + settings(settings(Version.CURRENT)). + numberOfShards(5). + numberOfReplicas(2). + build(); + } + + public void testGetRelevantIndicesWithUnassignedShardsOnMasterEligibleNode() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithUnassignedIndex(indexMetaData, true), + noIndexClusterState(true), + randomPrevWrittenIndices(indexMetaData)); + assertThat(indices.size(), equalTo(1)); + } + + public void testGetRelevantIndicesWithUnassignedShardsOnDataOnlyNode() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithUnassignedIndex(indexMetaData, false), + noIndexClusterState(false), + randomPrevWrittenIndices(indexMetaData)); + assertThat(indices.size(), equalTo(0)); + } + + public void testGetRelevantIndicesWithAssignedShards() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + boolean masterEligible = randomBoolean(); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithAssignedIndex(indexMetaData, masterEligible), + clusterStateWithUnassignedIndex(indexMetaData, masterEligible), + randomPrevWrittenIndices(indexMetaData)); + assertThat(indices.size(), equalTo(1)); + } + + public void testGetRelevantIndicesForClosedPrevWrittenIndexOnDataOnlyNode() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithClosedIndex(indexMetaData, false), + clusterStateWithAssignedIndex(indexMetaData, false), + Collections.singleton(indexMetaData.getIndex())); + assertThat(indices.size(), equalTo(1)); + } + + public void testGetRelevantIndicesForClosedPrevNotWrittenIndexOnDataOnlyNode() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithJustOpenedIndex(indexMetaData, false), + clusterStateWithClosedIndex(indexMetaData, false), + Collections.emptySet()); + assertThat(indices.size(), equalTo(0)); + } + + public void testGetRelevantIndicesForWasClosedPrevWrittenIndexOnDataOnlyNode() { + IndexMetaData indexMetaData = createIndexMetaData("test"); + Set indices = IncrementalClusterStateWriter.getRelevantIndices( + clusterStateWithJustOpenedIndex(indexMetaData, false), + clusterStateWithClosedIndex(indexMetaData, false), + Collections.singleton(indexMetaData.getIndex())); + assertThat(indices.size(), equalTo(1)); + } + + public void testResolveStatesToBeWritten() throws WriteStateException { + Map indices = new HashMap<>(); + Set relevantIndices = new HashSet<>(); + + IndexMetaData removedIndex = createIndexMetaData("removed_index"); + indices.put(removedIndex.getIndex(), 1L); + + IndexMetaData versionChangedIndex = createIndexMetaData("version_changed_index"); + indices.put(versionChangedIndex.getIndex(), 2L); + relevantIndices.add(versionChangedIndex.getIndex()); + + IndexMetaData notChangedIndex = createIndexMetaData("not_changed_index"); 
+ indices.put(notChangedIndex.getIndex(), 3L); + relevantIndices.add(notChangedIndex.getIndex()); + + IndexMetaData newIndex = createIndexMetaData("new_index"); + relevantIndices.add(newIndex.getIndex()); + + MetaData oldMetaData = MetaData.builder() + .put(removedIndex, false) + .put(versionChangedIndex, false) + .put(notChangedIndex, false) + .build(); + + MetaData newMetaData = MetaData.builder() + .put(versionChangedIndex, true) + .put(notChangedIndex, false) + .put(newIndex, false) + .build(); + + IndexMetaData newVersionChangedIndex = newMetaData.index(versionChangedIndex.getIndex()); + + List actions = + IncrementalClusterStateWriter.resolveIndexMetaDataActions(indices, relevantIndices, oldMetaData, newMetaData); + + assertThat(actions, hasSize(3)); + + for (IncrementalClusterStateWriter.IndexMetaDataAction action : actions) { + if (action instanceof IncrementalClusterStateWriter.KeepPreviousGeneration) { + assertThat(action.getIndex(), equalTo(notChangedIndex.getIndex())); + IncrementalClusterStateWriter.AtomicClusterStateWriter writer + = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class); + assertThat(action.execute(writer), equalTo(3L)); + verifyZeroInteractions(writer); + } + if (action instanceof IncrementalClusterStateWriter.WriteNewIndexMetaData) { + assertThat(action.getIndex(), equalTo(newIndex.getIndex())); + IncrementalClusterStateWriter.AtomicClusterStateWriter writer + = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class); + when(writer.writeIndex("freshly created", newIndex)).thenReturn(0L); + assertThat(action.execute(writer), equalTo(0L)); + } + if (action instanceof IncrementalClusterStateWriter.WriteChangedIndexMetaData) { + assertThat(action.getIndex(), equalTo(newVersionChangedIndex.getIndex())); + IncrementalClusterStateWriter.AtomicClusterStateWriter writer + = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class); + when(writer.writeIndex(anyString(), eq(newVersionChangedIndex))).thenReturn(3L); + assertThat(action.execute(writer), equalTo(3L)); + ArgumentCaptor reason = ArgumentCaptor.forClass(String.class); + verify(writer).writeIndex(reason.capture(), eq(newVersionChangedIndex)); + assertThat(reason.getValue(), containsString(Long.toString(versionChangedIndex.getVersion()))); + assertThat(reason.getValue(), containsString(Long.toString(newVersionChangedIndex.getVersion()))); + } + } + } + + private static class MetaStateServiceWithFailures extends MetaStateService { + private final int invertedFailRate; + private boolean failRandomly; + + private MetaDataStateFormat wrap(MetaDataStateFormat format) { + return new MetaDataStateFormat(format.getPrefix()) { + @Override + public void toXContent(XContentBuilder builder, T state) throws IOException { + format.toXContent(builder, state); + } + + @Override + public T fromXContent(XContentParser parser) throws IOException { + return format.fromXContent(parser); + } + + @Override + protected Directory newDirectory(Path dir) { + MockDirectoryWrapper mock = newMockFSDirectory(dir); + if (failRandomly) { + MockDirectoryWrapper.Failure fail = new MockDirectoryWrapper.Failure() { + @Override + public void eval(MockDirectoryWrapper dir) throws IOException { + int r = randomIntBetween(0, invertedFailRate); + if (r == 0) { + throw new MockDirectoryWrapper.FakeIOException(); + } + } + }; + mock.failOn(fail); + } + closeAfterSuite(mock); + return mock; + } + }; + } + + MetaStateServiceWithFailures(int invertedFailRate, NodeEnvironment nodeEnv, NamedXContentRegistry 
namedXContentRegistry) { + super(nodeEnv, namedXContentRegistry); + META_DATA_FORMAT = wrap(MetaData.FORMAT); + INDEX_META_DATA_FORMAT = wrap(IndexMetaData.FORMAT); + MANIFEST_FORMAT = wrap(Manifest.FORMAT); + failRandomly = false; + this.invertedFailRate = invertedFailRate; + } + + void failRandomly() { + failRandomly = true; + } + + void noFailures() { + failRandomly = false; + } + } + + private boolean metaDataEquals(MetaData md1, MetaData md2) { + boolean equals = MetaData.isGlobalStateEquals(md1, md2); + + for (IndexMetaData imd : md1) { + IndexMetaData imd2 = md2.index(imd.getIndex()); + equals = equals && imd.equals(imd2); + } + + for (IndexMetaData imd : md2) { + IndexMetaData imd2 = md1.index(imd.getIndex()); + equals = equals && imd.equals(imd2); + } + return equals; + } + + private static MetaData randomMetaDataForTx() { + int settingNo = randomIntBetween(0, 10); + MetaData.Builder builder = MetaData.builder() + .persistentSettings(Settings.builder().put("setting" + settingNo, randomAlphaOfLength(5)).build()); + int numOfIndices = randomIntBetween(0, 3); + + for (int i = 0; i < numOfIndices; i++) { + int indexNo = randomIntBetween(0, 50); + IndexMetaData indexMetaData = IndexMetaData.builder("index" + indexNo).settings( + Settings.builder() + .put(IndexMetaData.SETTING_INDEX_UUID, "index" + indexNo) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build() + ).build(); + builder.put(indexMetaData, false); + } + return builder.build(); + } + + public void testAtomicityWithFailures() throws IOException { + try (NodeEnvironment env = newNodeEnvironment()) { + MetaStateServiceWithFailures metaStateService = + new MetaStateServiceWithFailures(randomIntBetween(100, 1000), env, xContentRegistry()); + + // We only guarantee atomicity of writes, if there is initial Manifest file + Manifest manifest = Manifest.empty(); + MetaData metaData = MetaData.EMPTY_META_DATA; + metaStateService.writeManifestAndCleanup("startup", Manifest.empty()); + long currentTerm = randomNonNegativeLong(); + long clusterStateVersion = randomNonNegativeLong(); + + metaStateService.failRandomly(); + Set possibleMetaData = new HashSet<>(); + possibleMetaData.add(metaData); + + for (int i = 0; i < randomIntBetween(1, 5); i++) { + IncrementalClusterStateWriter.AtomicClusterStateWriter writer = + new IncrementalClusterStateWriter.AtomicClusterStateWriter(metaStateService, manifest); + metaData = randomMetaDataForTx(); + Map indexGenerations = new HashMap<>(); + + try { + long globalGeneration = writer.writeGlobalState("global", metaData); + + for (IndexMetaData indexMetaData : metaData) { + long generation = writer.writeIndex("index", indexMetaData); + indexGenerations.put(indexMetaData.getIndex(), generation); + } + + Manifest newManifest = new Manifest(currentTerm, clusterStateVersion, globalGeneration, indexGenerations); + writer.writeManifestAndCleanup("manifest", newManifest); + possibleMetaData.clear(); + possibleMetaData.add(metaData); + manifest = newManifest; + } catch (WriteStateException e) { + if (e.isDirty()) { + possibleMetaData.add(metaData); + /* + * If dirty WriteStateException occurred, it's only safe to proceed if there is subsequent + * successful write of metadata and Manifest. We prefer to break here, not to over complicate test logic. + * See also MetaDataStateFormat#testFailRandomlyAndReadAnyState, that does not break. 
+ */ + break; + } + } + } + + metaStateService.noFailures(); + + Tuple manifestAndMetaData = metaStateService.loadFullState(); + MetaData loadedMetaData = manifestAndMetaData.v2(); + + assertTrue(possibleMetaData.stream().anyMatch(md -> metaDataEquals(md, loadedMetaData))); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index fe7b8720981..102de69cc43 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -703,9 +703,8 @@ public class AbstractCoordinatorTestCase extends ESTestCase { if (rarely()) { nodeEnvironment = newNodeEnvironment(); nodeEnvironments.add(nodeEnvironment); - final MockGatewayMetaState gatewayMetaState - = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), localNode); - gatewayMetaState.start(); + final MockGatewayMetaState gatewayMetaState = new MockGatewayMetaState(localNode); + gatewayMetaState.start(Settings.EMPTY, nodeEnvironment, xContentRegistry()); delegate = gatewayMetaState.getPersistedState(); } else { nodeEnvironment = null; @@ -736,9 +735,8 @@ public class AbstractCoordinatorTestCase extends ESTestCase { new Manifest(updatedTerm, manifest.getClusterStateVersion(), manifest.getGlobalGeneration(), manifest.getIndexGenerations())); } - final MockGatewayMetaState gatewayMetaState - = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), newLocalNode); - gatewayMetaState.start(); + final MockGatewayMetaState gatewayMetaState = new MockGatewayMetaState(newLocalNode); + gatewayMetaState.start(Settings.EMPTY, nodeEnvironment, xContentRegistry()); delegate = gatewayMetaState.getPersistedState(); } else { nodeEnvironment = null; diff --git a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java index 006f2948831..b66b5ea3ee2 100644 --- a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java +++ b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java @@ -19,6 +19,7 @@ package org.elasticsearch.gateway; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -37,24 +38,23 @@ import org.elasticsearch.transport.TransportService; public class MockGatewayMetaState extends GatewayMetaState { private final DiscoveryNode localNode; - public MockGatewayMetaState(Settings settings, NodeEnvironment nodeEnvironment, - NamedXContentRegistry xContentRegistry, DiscoveryNode localNode) { - super(settings, new MetaStateService(nodeEnvironment, xContentRegistry)); + public MockGatewayMetaState(DiscoveryNode localNode) { this.localNode = localNode; } @Override - protected void upgradeMetaData(MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) { + void upgradeMetaData(Settings settings, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) { // MetaData upgrade is tested in GatewayMetaStateTests, we override this method to NOP to make mocking easier 
} @Override - public void applyClusterStateUpdaters(TransportService transportService, ClusterService clusterService) { + ClusterState prepareInitialClusterState(TransportService transportService, ClusterService clusterService, ClusterState clusterState) { // Just set localNode here, not to mess with ClusterService and IndicesService mocking - previousClusterState = ClusterStateUpdaters.setLocalNode(previousClusterState, localNode); + return ClusterStateUpdaters.setLocalNode(clusterState, localNode); } - public void start() { - start(null, null, null, null); + public void start(Settings settings, NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry) { + start(settings, null, null, new MetaStateService(nodeEnvironment, xContentRegistry), null, null); } } From 9135e2f9e391dbb94e4c8495c4fa6f3e9a00bfbf Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 24 Sep 2019 13:39:44 +0100 Subject: [PATCH 19/94] Improve LeaderCheck rejection messages (#46998) Today the `LeaderChecker` rejects checks from nodes that are not in the current cluster with the exception message `leader check from unknown node` which offers no information about why the node is unknown. In fact the node must have been in the cluster in the recent past, so it might help guide the user to a more useful log message if we describe it as a `removed node` instead of an `unknown node`. This commit changes the exception message like this, and also tidies up a few other loose ends in the `LeaderChecker`. --- .../cluster/coordination/LeaderChecker.java | 28 +++++++++---------- .../coordination/LeaderCheckerTests.java | 9 +++--- 2 files changed, 18 insertions(+), 19 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java index bb6dabbc2de..d1b58320fe9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java @@ -64,7 +64,7 @@ public class LeaderChecker { private static final Logger logger = LogManager.getLogger(LeaderChecker.class); - public static final String LEADER_CHECK_ACTION_NAME = "internal:coordination/fault_detection/leader_check"; + static final String LEADER_CHECK_ACTION_NAME = "internal:coordination/fault_detection/leader_check"; // the time between checks sent to the leader public static final Setting LEADER_CHECK_INTERVAL_SETTING = @@ -92,7 +92,7 @@ public class LeaderChecker { private volatile DiscoveryNodes discoveryNodes; - public LeaderChecker(final Settings settings, final TransportService transportService, final Consumer onLeaderFailure) { + LeaderChecker(final Settings settings, final TransportService transportService, final Consumer onLeaderFailure) { this.settings = settings; leaderCheckInterval = LEADER_CHECK_INTERVAL_SETTING.get(settings); leaderCheckTimeout = LEADER_CHECK_TIMEOUT_SETTING.get(settings); @@ -134,7 +134,7 @@ public class LeaderChecker { * * @param leader the node to be checked as leader, or null if checks should be disabled */ - public void updateLeader(@Nullable final DiscoveryNode leader) { + void updateLeader(@Nullable final DiscoveryNode leader) { assert transportService.getLocalNode().equals(leader) == false; final CheckScheduler checkScheduler; if (leader != null) { @@ -154,12 +154,8 @@ public class LeaderChecker { /** * Update the "known" discovery nodes. 
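// Illustrative sketch only, not part of this patch: the rejection logic that the
// handleRequest hunk below implements, spelled out as a standalone method. The
// names handleLeaderCheck, discoveryNodes and request are assumptions made for
// illustration; the two exception messages are taken verbatim from the diff below.
private void handleLeaderCheck(LeaderCheckRequest request) {
    if (discoveryNodes.isLocalNodeElectedMaster() == false) {
        // this node used to be the master but is no longer, so any check sent to it must fail
        throw new CoordinationStateRejectedException(
            "rejecting leader check from [" + request.getSender() + "] sent to a node that is no longer the master");
    } else if (discoveryNodes.nodeExists(request.getSender()) == false) {
        // the sender was in the cluster in the recent past but has since been removed
        throw new CoordinationStateRejectedException(
            "rejecting leader check since [" + request.getSender() + "] has been removed from the cluster");
    }
    // otherwise the check is accepted and handled normally
}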
Should be called on the leader before a new cluster state is published to reflect the new * publication targets, and also called if a leader becomes a non-leader. - * TODO if heartbeats can make nodes become followers then this needs to be called before a heartbeat is sent to a new node too. - *
<p>
- * isLocalNodeElectedMaster() should reflect whether this node is a leader, and nodeExists() - * should indicate whether nodes are known publication targets or not. */ - public void setCurrentNodes(DiscoveryNodes discoveryNodes) { + void setCurrentNodes(DiscoveryNodes discoveryNodes) { logger.trace("setCurrentNodes: {}", discoveryNodes); this.discoveryNodes = discoveryNodes; } @@ -174,11 +170,13 @@ public class LeaderChecker { assert discoveryNodes != null; if (discoveryNodes.isLocalNodeElectedMaster() == false) { - logger.debug("non-master handling {}", request); - throw new CoordinationStateRejectedException("non-leader rejecting leader check"); + logger.debug("rejecting leader check on non-master {}", request); + throw new CoordinationStateRejectedException( + "rejecting leader check from [" + request.getSender() + "] sent to a node that is no longer the master"); } else if (discoveryNodes.nodeExists(request.getSender()) == false) { - logger.debug("leader check from unknown node: {}", request); - throw new CoordinationStateRejectedException("leader check from unknown node"); + logger.debug("rejecting leader check from removed node: {}", request); + throw new CoordinationStateRejectedException( + "rejecting leader check since [" + request.getSender() + "] has been removed from the cluster"); } else { logger.trace("handling {}", request); } @@ -332,15 +330,15 @@ public class LeaderChecker { } } - public static class LeaderCheckRequest extends TransportRequest { + static class LeaderCheckRequest extends TransportRequest { private final DiscoveryNode sender; - public LeaderCheckRequest(final DiscoveryNode sender) { + LeaderCheckRequest(final DiscoveryNode sender) { this.sender = sender; } - public LeaderCheckRequest(final StreamInput in) throws IOException { + LeaderCheckRequest(final StreamInput in) throws IOException { super(in); sender = new DiscoveryNode(in); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java index ce25d24bce6..496a25e1802 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java @@ -220,7 +220,7 @@ public class LeaderCheckerTests extends ESTestCase { return; } assertThat(action, equalTo(LEADER_CHECK_ACTION_NAME)); - assertTrue(node.equals(leader)); + assertEquals(node, leader); final Response response = responseHolder[0]; deterministicTaskQueue.scheduleNow(new Runnable() { @@ -340,7 +340,7 @@ public class LeaderCheckerTests extends ESTestCase { assertFalse(handler.successfulResponseReceived); assertThat(handler.transportException.getRootCause(), instanceOf(CoordinationStateRejectedException.class)); CoordinationStateRejectedException cause = (CoordinationStateRejectedException) handler.transportException.getRootCause(); - assertThat(cause.getMessage(), equalTo("leader check from unknown node")); + assertThat(cause.getMessage(), equalTo("rejecting leader check since [" + otherNode + "] has been removed from the cluster")); } { @@ -364,7 +364,8 @@ public class LeaderCheckerTests extends ESTestCase { assertFalse(handler.successfulResponseReceived); assertThat(handler.transportException.getRootCause(), instanceOf(CoordinationStateRejectedException.class)); CoordinationStateRejectedException cause = (CoordinationStateRejectedException) handler.transportException.getRootCause(); - assertThat(cause.getMessage(), 
equalTo("non-leader rejecting leader check")); + assertThat(cause.getMessage(), + equalTo("rejecting leader check from [" + otherNode + "] sent to a node that is no longer the master")); } } @@ -397,7 +398,7 @@ public class LeaderCheckerTests extends ESTestCase { public void testLeaderCheckRequestEqualsHashcodeSerialization() { LeaderCheckRequest request = new LeaderCheckRequest( new DiscoveryNode(randomAlphaOfLength(10), buildNewFakeTransportAddress(), Version.CURRENT)); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type + //noinspection RedundantCast since it is needed for some IDEs (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode(request, (CopyFunction) rq -> copyWriteable(rq, writableRegistry(), LeaderCheckRequest::new), rq -> new LeaderCheckRequest(new DiscoveryNode(randomAlphaOfLength(10), buildNewFakeTransportAddress(), Version.CURRENT))); From a1af2fe96ae67968f1d1c393cddd3bbfe98809d2 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 24 Sep 2019 07:33:55 -0700 Subject: [PATCH 20/94] Rename Painless node SSource to SClass (#46984) Mechanical renaming of SSource node to SClass to better align with the names of what other nodes generate. --- .../org/elasticsearch/painless/Compiler.java | 8 +- .../elasticsearch/painless/antlr/Walker.java | 10 +- .../painless/node/AStatement.java | 2 +- .../node/{SSource.java => SClass.java} | 4 +- .../painless/node/package-info.java | 4 +- .../painless/node/NodeToStringTests.java | 258 +++++++++--------- 6 files changed, 143 insertions(+), 143 deletions(-) rename modules/lang-painless/src/main/java/org/elasticsearch/painless/node/{SSource.java => SClass.java} (99%) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 87058adfafa..3e504ea545e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -22,7 +22,7 @@ package org.elasticsearch.painless; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.node.SSource; +import org.elasticsearch.painless.node.SClass; import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Printer; @@ -45,7 +45,7 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; /** * The Compiler is the entry point for generating a Painless script. The compiler will receive a Painless * tree based on the type of input passed in (currently only ANTLR). Two passes will then be run over the tree, - * one for analysis and another to generate the actual byte code using ASM using the root of the tree {@link SSource}. + * one for analysis and another to generate the actual byte code using ASM using the root of the tree {@link SClass}. 
*/ final class Compiler { @@ -209,7 +209,7 @@ final class Compiler { */ Constructor compile(Loader loader, Set extractedVariables, String name, String source, CompilerSettings settings) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); - SSource root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, null); + SClass root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, null); root.extractVariables(extractedVariables); root.storeSettings(settings); root.analyze(painlessLookup); @@ -240,7 +240,7 @@ final class Compiler { */ byte[] compile(String name, String source, CompilerSettings settings, Printer debugStream) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); - SSource root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, + SClass root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, debugStream); root.extractVariables(new HashSet<>()); root.storeSettings(settings); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 9d550ce5f5d..f5ff7881fd9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -152,7 +152,7 @@ import org.elasticsearch.painless.node.SFunction; import org.elasticsearch.painless.node.SIf; import org.elasticsearch.painless.node.SIfElse; import org.elasticsearch.painless.node.SReturn; -import org.elasticsearch.painless.node.SSource; +import org.elasticsearch.painless.node.SClass; import org.elasticsearch.painless.node.SThrow; import org.elasticsearch.painless.node.STry; import org.elasticsearch.painless.node.SWhile; @@ -166,14 +166,14 @@ import java.util.List; */ public final class Walker extends PainlessParserBaseVisitor { - public static SSource buildPainlessTree(ScriptClassInfo mainMethod, String sourceName, + public static SClass buildPainlessTree(ScriptClassInfo mainMethod, String sourceName, String sourceText, CompilerSettings settings, PainlessLookup painlessLookup, Printer debugStream) { return new Walker(mainMethod, sourceName, sourceText, settings, painlessLookup, debugStream).source; } private final ScriptClassInfo scriptClassInfo; - private final SSource source; + private final SClass source; private final CompilerSettings settings; private final Printer debugStream; private final String sourceName; @@ -188,7 +188,7 @@ public final class Walker extends PainlessParserBaseVisitor { this.sourceName = Location.computeSourceName(sourceName); this.sourceText = sourceText; this.painlessLookup = painlessLookup; - this.source = (SSource)visit(buildAntlrTree(sourceText)); + this.source = (SClass)visit(buildAntlrTree(sourceText)); } private SourceContext buildAntlrTree(String source) { @@ -245,7 +245,7 @@ public final class Walker extends PainlessParserBaseVisitor { statements.add((AStatement)visit(statement)); } - return new SSource(scriptClassInfo, sourceName, sourceText, debugStream, location(ctx), functions, statements); + return new SClass(scriptClassInfo, sourceName, sourceText, debugStream, location(ctx), functions, statements); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index 42ffe0a1582..629449c1a58 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -29,7 +29,7 @@ import org.objectweb.asm.Label; public abstract class AStatement extends ANode { /** - * Set to true when the final statement in an {@link SSource} is reached. + * Set to true when the final statement in an {@link SClass} is reached. * Used to determine whether or not an auto-return is necessary. */ boolean lastSource = false; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java similarity index 99% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java index fd8129c4294..0d15aec23b1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java @@ -78,7 +78,7 @@ import static org.elasticsearch.painless.WriterConstants.STRING_TYPE; /** * The root of all Painless trees. Contains a series of statements. */ -public final class SSource extends AStatement { +public final class SClass extends AStatement { private final ScriptClassInfo scriptClassInfo; private final String name; @@ -94,7 +94,7 @@ public final class SSource extends AStatement { private final List getMethods; private byte[] bytes; - public SSource(ScriptClassInfo scriptClassInfo, String name, String sourceText, Printer debugStream, + public SClass(ScriptClassInfo scriptClassInfo, String name, String sourceText, Printer debugStream, Location location, List functions, List statements) { super(location); this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index 8ba8b79b74a..1f74013d557 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -88,7 +88,7 @@ * {@link org.elasticsearch.painless.node.SIf} - Represents an if block. * {@link org.elasticsearch.painless.node.SIfElse} - Represents an if/else block. * {@link org.elasticsearch.painless.node.SReturn} - Represents a return statement. - * {@link org.elasticsearch.painless.node.SSource} - The root of all Painless trees. Contains a series of statements. + * {@link org.elasticsearch.painless.node.SClass} - The root of all Painless trees. Contains a series of statements. * {@link org.elasticsearch.painless.node.SSubEachArray} - Represents a for-each loop for arrays. * {@link org.elasticsearch.painless.node.SSubEachIterable} - Represents a for-each loop for iterables. * {@link org.elasticsearch.painless.node.SThrow} - Represents a throw statement. @@ -98,7 +98,7 @@ * Note that internal nodes are generated during the analysis phase by modifying the tree on-the-fly * for clarity of development and convenience during the writing phase. *
<p>
- * All Painless trees must start with an SSource node at the root. Each node has a constructor that requires + * All Painless trees must start with an SClass node at the root. Each node has a constructor that requires * all of its values and children be passed in at the time of instantiation. This means that Painless trees * are build bottom-up; however, this helps enforce tree structure correctness and fits naturally with a * standard recursive-descent parser. diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index f129984863e..d2832dfdfd5 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -53,7 +53,7 @@ public class NodeToStringTests extends ESTestCase { public void testEAssignment() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SExpression (EAssignment (EVariable i) = (ENumeric 2)))\n" + " (SReturn (EVariable i)))", @@ -62,7 +62,7 @@ public class NodeToStringTests extends ESTestCase { + "return i"); for (String operator : new String[] {"+", "-", "*", "/", "%", "&", "^", "|", "<<", ">>", ">>>"}) { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i (ENumeric 1)))\n" + " (SExpression (EAssignment (EVariable i) " + operator + "= (ENumeric 2)))\n" + " (SReturn (EVariable i)))", @@ -72,31 +72,31 @@ public class NodeToStringTests extends ESTestCase { } // Compound assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) = (ENumeric 2))))", "def i;\n" + "return i = 2"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) ++ post)))", "def i;\n" + "return i++"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) ++ pre)))", "def i;\n" + "return ++i"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) -- post)))", "def i;\n" + "return i--"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) -- pre)))", "def i;\n" @@ -104,34 +104,34 @@ public class NodeToStringTests extends ESTestCase { } public void testEBinary() { - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) * (ENumeric 1))))", "return 1 * 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) / (ENumeric 1))))", "return 1 / 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) % (ENumeric 1))))", "return 1 % 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) + (ENumeric 1))))", "return 1 + 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) - (ENumeric 1))))", "return 1 - 1"); - assertToString( "(SSource (SReturn (EBinary (EString 'asb') =~ (ERegex /cat/))))", "return 'asb' =~ /cat/"); - assertToString("(SSource (SReturn (EBinary (EString 'asb') ==~ (ERegex /cat/))))", "return 'asb' ==~ /cat/"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) << (ENumeric 1))))", "return 1 << 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) >> (ENumeric 1))))", "return 1 >> 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) >>> 
(ENumeric 1))))", "return 1 >>> 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) & (ENumeric 1))))", "return 1 & 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) ^ (ENumeric 1))))", "return 1 ^ 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) | (ENumeric 1))))", "return 1 | 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) * (ENumeric 1))))", "return 1 * 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) / (ENumeric 1))))", "return 1 / 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) % (ENumeric 1))))", "return 1 % 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) + (ENumeric 1))))", "return 1 + 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) - (ENumeric 1))))", "return 1 - 1"); + assertToString( "(SClass (SReturn (EBinary (EString 'asb') =~ (ERegex /cat/))))", "return 'asb' =~ /cat/"); + assertToString("(SClass (SReturn (EBinary (EString 'asb') ==~ (ERegex /cat/))))", "return 'asb' ==~ /cat/"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) << (ENumeric 1))))", "return 1 << 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) >> (ENumeric 1))))", "return 1 >> 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) >>> (ENumeric 1))))", "return 1 >>> 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) & (ENumeric 1))))", "return 1 & 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) ^ (ENumeric 1))))", "return 1 ^ 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) | (ENumeric 1))))", "return 1 | 1"); } public void testEBool() { - assertToString("(SSource (SReturn (EBool (EBoolean true) && (EBoolean false))))", "return true && false"); - assertToString("(SSource (SReturn (EBool (EBoolean true) || (EBoolean false))))", "return true || false"); + assertToString("(SClass (SReturn (EBool (EBoolean true) && (EBoolean false))))", "return true && false"); + assertToString("(SClass (SReturn (EBool (EBoolean true) || (EBoolean false))))", "return true || false"); } public void testEBoolean() { - assertToString("(SSource (SReturn (EBoolean true)))", "return true"); - assertToString("(SSource (SReturn (EBoolean false)))", "return false"); + assertToString("(SClass (SReturn (EBoolean true)))", "return true"); + assertToString("(SClass (SReturn (EBoolean false)))", "return false"); } public void testECallLocal() { assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (ECallLocal a)))", @@ -140,7 +140,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return a()"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (ECallLocal a (Args (ENumeric 1) (ENumeric 2)))))", @@ -152,7 +152,7 @@ public class NodeToStringTests extends ESTestCase { public void testECapturingFunctionRef() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration Integer x (PCallInvoke (EStatic Integer) valueOf (Args (ENumeric 5)))))\n" + " (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args (ECapturingFunctionRef x toString)))))", "Integer x = Integer.valueOf(5);\n" @@ -173,18 +173,18 @@ public class NodeToStringTests extends ESTestCase { } public void testEComp() { - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) < (ENumeric 10))))", "return params.a < 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable 
params) a) <= (ENumeric 10))))", "return params.a <= 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) > (ENumeric 10))))", "return params.a > 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) >= (ENumeric 10))))", "return params.a >= 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) == (ENumeric 10))))", "return params.a == 10"); - assertToString("(SSource (SReturn (EComp (PField (EVariable params) a) === (ENumeric 10))))", "return params.a === 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) != (ENumeric 10))))", "return params.a != 10"); - assertToString("(SSource (SReturn (EComp (PField (EVariable params) a) !== (ENumeric 10))))", "return params.a !== 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) < (ENumeric 10))))", "return params.a < 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) <= (ENumeric 10))))", "return params.a <= 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) > (ENumeric 10))))", "return params.a > 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) >= (ENumeric 10))))", "return params.a >= 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) == (ENumeric 10))))", "return params.a == 10"); + assertToString("(SClass (SReturn (EComp (PField (EVariable params) a) === (ENumeric 10))))", "return params.a === 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) != (ENumeric 10))))", "return params.a != 10"); + assertToString("(SClass (SReturn (EComp (PField (EVariable params) a) !== (ENumeric 10))))", "return params.a !== 10"); } public void testEConditional() { - assertToString("(SSource (SReturn (EConditional (PField (EVariable params) a) (ENumeric 1) (ENumeric 6))))", + assertToString("(SClass (SReturn (EConditional (PField (EVariable params) a) (ENumeric 1) (ENumeric 6))))", "return params.a ? 
1 : 6"); } @@ -196,68 +196,68 @@ public class NodeToStringTests extends ESTestCase { } public void testEDecimal() { - assertToString("(SSource (SReturn (EDecimal 1.0)))", "return 1.0"); - assertToString("(SSource (SReturn (EDecimal 14.121d)))", "return 14.121d"); - assertToString("(SSource (SReturn (EDecimal 2234.1f)))", "return 2234.1f"); - assertToString("(SSource (SReturn (EDecimal 14.121D)))", "return 14.121D"); - assertToString("(SSource (SReturn (EDecimal 1234.1F)))", "return 1234.1F"); + assertToString("(SClass (SReturn (EDecimal 1.0)))", "return 1.0"); + assertToString("(SClass (SReturn (EDecimal 14.121d)))", "return 14.121d"); + assertToString("(SClass (SReturn (EDecimal 2234.1f)))", "return 2234.1f"); + assertToString("(SClass (SReturn (EDecimal 14.121D)))", "return 14.121D"); + assertToString("(SClass (SReturn (EDecimal 1234.1F)))", "return 1234.1F"); } public void testEElvis() { - assertToString("(SSource (SReturn (EElvis (PField (EVariable params) a) (ENumeric 1))))", "return params.a ?: 1"); + assertToString("(SClass (SReturn (EElvis (PField (EVariable params) a) (ENumeric 1))))", "return params.a ?: 1"); } public void testEExplicit() { - assertToString("(SSource (SReturn (EExplicit byte (PField (EVariable params) a))))", "return (byte)(params.a)"); + assertToString("(SClass (SReturn (EExplicit byte (PField (EVariable params) a))))", "return (byte)(params.a)"); } public void testEFunctionRef() { assertToString( - "(SSource (SReturn " + "(SClass (SReturn " + "(PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args (EFunctionRef Optional empty)))))", "return Optional.empty().orElseGet(Optional::empty)"); } public void testEInstanceOf() { - assertToString("(SSource (SReturn (EInstanceof (ENewObj Object) Object)))", "return new Object() instanceof Object"); - assertToString("(SSource (SReturn (EInstanceof (ENumeric 12) double)))", "return 12 instanceof double"); + assertToString("(SClass (SReturn (EInstanceof (ENewObj Object) Object)))", "return new Object() instanceof Object"); + assertToString("(SClass (SReturn (EInstanceof (ENumeric 12) double)))", "return 12 instanceof double"); } public void testELambda() { assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(SClass (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(ELambda (SReturn (ENumeric 1)))))))", "return Optional.empty().orElseGet(() -> {\n" + " return 1\n" + "})"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(SClass (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(ELambda (SReturn (ENumeric 1)))))))", "return Optional.empty().orElseGet(() -> 1)"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "(SClass (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "mapToInt (Args (ELambda (Pair def x)\n" + " (SReturn (EBinary (EVariable x) + (ENumeric 1)))))) sum)))", "return [1, 2, 3].stream().mapToInt((def x) -> {\n" + " return x + 1\n" + "}).sum()"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "(SClass (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "mapToInt (Args (ELambda (Pair null x)\n" + " (SReturn (EBinary (EVariable 
x) + (ENumeric 1)))))) sum)))", "return [1, 2, 3].stream().mapToInt(x -> x + 1).sum()"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + " (SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length))))))))", "return ['a', 'b'].sort((def a, def b) -> {\n" + " return a.length() - b.length()\n" + "})"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair null a) (Pair null b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair null a) (Pair null b)\n" + " (SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length))))))))", "return ['a', 'b'].sort((a, b) -> a.length() - b.length())"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + " (SIf (EComp (EVariable a) < (EVariable b)) (SBlock " + "(SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length)))))\n" + " (SReturn (ENumeric 1)))))))", @@ -270,85 +270,85 @@ public class NodeToStringTests extends ESTestCase { } public void testEListInit() { - assertToString("(SSource (SReturn (EListInit (ENumeric 1) (ENumeric 2) (EString 'cat') (EString 'dog') (ENewObj Object))))", + assertToString("(SClass (SReturn (EListInit (ENumeric 1) (ENumeric 2) (EString 'cat') (EString 'dog') (ENewObj Object))))", "return [1, 2, 'cat', 'dog', new Object()]"); - assertToString("(SSource (SReturn (EListInit)))", "return []"); + assertToString("(SClass (SReturn (EListInit)))", "return []"); } public void testEMapInit() { - assertToString("(SSource (SReturn (EMapInit " + assertToString("(SClass (SReturn (EMapInit " + "(Pair (EString 'a') (ENumeric 1)) " + "(Pair (EString 'b') (ENumeric 3)) " + "(Pair (ENumeric 12) (ENewObj Object)))))", "return ['a': 1, 'b': 3, 12: new Object()]"); - assertToString("(SSource (SReturn (EMapInit)))", "return [:]"); + assertToString("(SClass (SReturn (EMapInit)))", "return [:]"); } public void testENewArray() { - assertToString("(SSource (SReturn (ENewArray int[] dims (Args (ENumeric 10)))))", "return new int[10]"); - assertToString("(SSource (SReturn (ENewArray int[][][] dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", + assertToString("(SClass (SReturn (ENewArray int[] dims (Args (ENumeric 10)))))", "return new int[10]"); + assertToString("(SClass (SReturn (ENewArray int[][][] dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", "return new int[10][4][5]"); - assertToString("(SSource (SReturn (ENewArray int[] init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", + assertToString("(SClass (SReturn (ENewArray int[] init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", "return new int[] {1, 2, 3}"); - assertToString("(SSource (SReturn (ENewArray def[] init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", + assertToString("(SClass (SReturn (ENewArray def[] init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", "return new def[] {1, 2, 'bird'}"); } public void testENewObj() { - assertToString("(SSource (SReturn (ENewObj Object)))", "return new Object()"); - assertToString("(SSource (SReturn 
(ENewObj DateTimeException (Args (EString 'test')))))", "return new DateTimeException('test')"); + assertToString("(SClass (SReturn (ENewObj Object)))", "return new Object()"); + assertToString("(SClass (SReturn (ENewObj DateTimeException (Args (EString 'test')))))", "return new DateTimeException('test')"); } public void testENull() { - assertToString("(SSource (SReturn (ENull)))", "return null"); + assertToString("(SClass (SReturn (ENull)))", "return null"); } public void testENumeric() { - assertToString("(SSource (SReturn (ENumeric 1)))", "return 1"); - assertToString("(SSource (SReturn (ENumeric 114121d)))", "return 114121d"); - assertToString("(SSource (SReturn (ENumeric 114134f)))", "return 114134f"); - assertToString("(SSource (SReturn (ENumeric 114121D)))", "return 114121D"); - assertToString("(SSource (SReturn (ENumeric 111234F)))", "return 111234F"); - assertToString("(SSource (SReturn (ENumeric 774121l)))", "return 774121l"); - assertToString("(SSource (SReturn (ENumeric 881234L)))", "return 881234L"); + assertToString("(SClass (SReturn (ENumeric 1)))", "return 1"); + assertToString("(SClass (SReturn (ENumeric 114121d)))", "return 114121d"); + assertToString("(SClass (SReturn (ENumeric 114134f)))", "return 114134f"); + assertToString("(SClass (SReturn (ENumeric 114121D)))", "return 114121D"); + assertToString("(SClass (SReturn (ENumeric 111234F)))", "return 111234F"); + assertToString("(SClass (SReturn (ENumeric 774121l)))", "return 774121l"); + assertToString("(SClass (SReturn (ENumeric 881234L)))", "return 881234L"); - assertToString("(SSource (SReturn (ENumeric 1 16)))", "return 0x1"); - assertToString("(SSource (SReturn (ENumeric 774121l 16)))", "return 0x774121l"); - assertToString("(SSource (SReturn (ENumeric 881234L 16)))", "return 0x881234L"); + assertToString("(SClass (SReturn (ENumeric 1 16)))", "return 0x1"); + assertToString("(SClass (SReturn (ENumeric 774121l 16)))", "return 0x774121l"); + assertToString("(SClass (SReturn (ENumeric 881234L 16)))", "return 0x881234L"); - assertToString("(SSource (SReturn (ENumeric 1 8)))", "return 01"); - assertToString("(SSource (SReturn (ENumeric 774121l 8)))", "return 0774121l"); - assertToString("(SSource (SReturn (ENumeric 441234L 8)))", "return 0441234L"); + assertToString("(SClass (SReturn (ENumeric 1 8)))", "return 01"); + assertToString("(SClass (SReturn (ENumeric 774121l 8)))", "return 0774121l"); + assertToString("(SClass (SReturn (ENumeric 441234L 8)))", "return 0441234L"); } public void testERegex() { - assertToString("(SSource (SReturn (ERegex /foo/)))", "return /foo/"); - assertToString("(SSource (SReturn (ERegex /foo/ cix)))", "return /foo/cix"); - assertToString("(SSource (SReturn (ERegex /foo/ cix)))", "return /foo/xci"); + assertToString("(SClass (SReturn (ERegex /foo/)))", "return /foo/"); + assertToString("(SClass (SReturn (ERegex /foo/ cix)))", "return /foo/cix"); + assertToString("(SClass (SReturn (ERegex /foo/ cix)))", "return /foo/xci"); } public void testEStatic() { - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); } public void testEString() { - assertToString("(SSource (SReturn (EString 'foo')))", "return 'foo'"); - assertToString("(SSource (SReturn (EString ' oo')))", "return ' oo'"); - assertToString("(SSource (SReturn (EString 'fo ')))", "return 'fo '"); - assertToString("(SSource (SReturn (EString ' o ')))", "return ' o '"); + 
assertToString("(SClass (SReturn (EString 'foo')))", "return 'foo'"); + assertToString("(SClass (SReturn (EString ' oo')))", "return ' oo'"); + assertToString("(SClass (SReturn (EString 'fo ')))", "return 'fo '"); + assertToString("(SClass (SReturn (EString ' o ')))", "return ' o '"); } public void testEUnary() { - assertToString("(SSource (SReturn (EUnary ! (EBoolean true))))", "return !true"); - assertToString("(SSource (SReturn (EUnary ~ (ENumeric 1))))", "return ~1"); - assertToString("(SSource (SReturn (EUnary + (ENumeric 1))))", "return +1"); - assertToString("(SSource (SReturn (EUnary - (ENumeric 1))))", "return -(1)"); + assertToString("(SClass (SReturn (EUnary ! (EBoolean true))))", "return !true"); + assertToString("(SClass (SReturn (EUnary ~ (ENumeric 1))))", "return ~1"); + assertToString("(SClass (SReturn (EUnary + (ENumeric 1))))", "return +1"); + assertToString("(SClass (SReturn (EUnary - (ENumeric 1))))", "return -(1)"); } public void testEVariable() { - assertToString("(SSource (SReturn (EVariable params)))", "return params"); + assertToString("(SClass (SReturn (EVariable params)))", "return params"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a (ENumeric 1)))\n" + " (SReturn (EVariable a)))", "def a = 1;\n" @@ -356,29 +356,29 @@ public class NodeToStringTests extends ESTestCase { } public void testPBrace() { - assertToString("(SSource (SReturn (PBrace (PField (EVariable params) a) (ENumeric 10))))", "return params.a[10]"); - assertToString("(SSource (SReturn (PBrace (EVariable params) (EString 'a'))))", "return params['a']"); + assertToString("(SClass (SReturn (PBrace (PField (EVariable params) a) (ENumeric 10))))", "return params.a[10]"); + assertToString("(SClass (SReturn (PBrace (EVariable params) (EString 'a'))))", "return params['a']"); } public void testPCallInvoke() { - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) of (Args (ENumeric 1)))))", "return Optional.of(1)"); - assertToString("(SSource (SReturn (PCallInvoke (EStatic Objects) equals (Args (ENumeric 1) (ENumeric 2)))))", + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) of (Args (ENumeric 1)))))", "return Optional.of(1)"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Objects) equals (Args (ENumeric 1) (ENumeric 2)))))", "return Objects.equals(1, 2)"); - assertToString("(SSource (SReturn (PCallInvoke (EVariable params) equals (Args (ENumeric 1)))))", "return params.equals(1)"); + assertToString("(SClass (SReturn (PCallInvoke (EVariable params) equals (Args (ENumeric 1)))))", "return params.equals(1)"); } public void testPField() { - assertToString("(SSource (SReturn (PField (EVariable params) a)))", "return params.a"); - assertToString("(SSource (SReturn (PField nullSafe (EVariable params) a)))", "return params?.a"); + assertToString("(SClass (SReturn (PField (EVariable params) a)))", "return params.a"); + assertToString("(SClass (SReturn (PField nullSafe (EVariable params) a)))", "return params?.a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int[] a (ENewArray int[] dims (Args (ENumeric 10)))))\n" + " (SReturn (PField (EVariable a) length)))", "int[] a = new int[10];\n" + "return a.length"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration 
org.elasticsearch.painless.FeatureTestObject a" + " (ENewObj org.elasticsearch.painless.FeatureTestObject)))\n" + " (SExpression (EAssignment (PField (EVariable a) x) = (ENumeric 10)))\n" @@ -510,7 +510,7 @@ public class NodeToStringTests extends ESTestCase { public void testSBreak() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -538,7 +538,7 @@ public class NodeToStringTests extends ESTestCase { public void testSContinue() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -566,7 +566,7 @@ public class NodeToStringTests extends ESTestCase { public void testSDeclBlock() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a))\n" + " (SExpression (EAssignment (EVariable a) = (ENumeric 10)))\n" + " (SReturn (EVariable a)))", @@ -574,13 +574,13 @@ public class NodeToStringTests extends ESTestCase { + "a = 10;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a (ENumeric 10)))\n" + " (SReturn (EVariable a)))", "def a = 10;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a)\n" + " (SDeclaration def b)\n" @@ -589,7 +589,7 @@ public class NodeToStringTests extends ESTestCase { "def a, b, c;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" + " (SDeclaration def b (ENumeric 20))\n" @@ -598,7 +598,7 @@ public class NodeToStringTests extends ESTestCase { "def a = 10, b = 20, c = 100;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" + " (SDeclaration def b)\n" @@ -607,7 +607,7 @@ public class NodeToStringTests extends ESTestCase { "def a = 10, b, c = 100;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SIf (PField (EVariable params) a) (SBlock\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" @@ -624,7 +624,7 @@ public class NodeToStringTests extends ESTestCase { public void testSDo() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -648,7 +648,7 @@ public class NodeToStringTests extends ESTestCase { public void testSEach() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int l (ENumeric 0)))\n" + " (SEach String s (EListInit (EString 'cat') (EString 'dog') (EString 'chicken')) (SBlock " + "(SExpression (EAssignment (EVariable l) += (PCallInvoke (EVariable s) length)))))\n" @@ -659,7 +659,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return l"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int l (ENumeric 0)))\n" + " (SEach String s (EListInit (EString 'cat') (EString 'dog') (EString 'chicken')) (SBlock\n" + " (SDeclBlock (SDeclaration String s2 (EBinary (EString 'dire ') + (EVariable s))))\n" @@ -675,7 +675,7 @@ public class NodeToStringTests extends ESTestCase { public void testSFor() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int sum (ENumeric 0)))\n" + " (SFor\n" + " (SDeclBlock 
(SDeclaration int i (ENumeric 0)))\n" @@ -689,7 +689,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return sum"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int sum (ENumeric 0)))\n" + " (SFor\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" @@ -712,12 +712,12 @@ public class NodeToStringTests extends ESTestCase { public void testSIf() { assertToString( - "(SSource (SIf (PField (EVariable param) a) (SBlock (SReturn (EBoolean true)))))", + "(SClass (SIf (PField (EVariable param) a) (SBlock (SReturn (EBoolean true)))))", "if (param.a) {\n" + " return true\n" +"}"); assertToString( - "(SSource (SIf (PField (EVariable param) a) (SBlock\n" + "(SClass (SIf (PField (EVariable param) a) (SBlock\n" + " (SIf (PField (EVariable param) b) (SBlock (SReturn (EBoolean true))))\n" + " (SReturn (EBoolean false)))))", "if (param.a) {\n" @@ -730,7 +730,7 @@ public class NodeToStringTests extends ESTestCase { public void testSIfElse() { assertToString( - "(SSource (SIfElse (PField (EVariable param) a)\n" + "(SClass (SIfElse (PField (EVariable param) a)\n" + " (SBlock (SReturn (EBoolean true)))\n" + " (SBlock (SReturn (EBoolean false)))))", "if (param.a) {\n" @@ -739,7 +739,7 @@ public class NodeToStringTests extends ESTestCase { + " return false\n" + "}"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" + " (SIfElse (PField (EVariable param) a)\n" + " (SBlock (SIfElse (PField (EVariable param) b)\n" @@ -783,12 +783,12 @@ public class NodeToStringTests extends ESTestCase { } public void testSThrow() { - assertToString("(SSource (SThrow (ENewObj RuntimeException)))", "throw new RuntimeException()"); + assertToString("(SClass (SThrow (ENewObj RuntimeException)))", "throw new RuntimeException()"); } public void testSWhile() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" + " (SWhile (EComp (EVariable i) < (ENumeric 10)) (SBlock (SExpression (EAssignment (EVariable i) ++ post))))\n" + " (SReturn (EVariable i)))", @@ -801,7 +801,7 @@ public class NodeToStringTests extends ESTestCase { public void testSFunction() { assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (EBoolean true)))", @@ -810,7 +810,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (EBoolean true)))", @@ -819,7 +819,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SIf (EComp (EVariable i) < (EVariable j)) (SBlock (SReturn (EBoolean true))))\n" + " (SDeclBlock (SDeclaration int k (EBinary (EVariable i) + (EVariable j))))\n" @@ -834,7 +834,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SFunction def b\n" @@ -851,7 +851,7 @@ public class NodeToStringTests extends ESTestCase { public void testSTryAndSCatch() { assertToString( - "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n" + "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n" + " (SCatch Exception e (SBlock (SReturn (ENumeric 2))))))", "try {\n" + " return 1\n" @@ -859,7 +859,7 @@ public class NodeToStringTests 
extends ESTestCase { + " return 2\n" + "}"); assertToString( - "(SSource (STry (SBlock\n" + "(SClass (STry (SBlock\n" + " (SDeclBlock (SDeclaration int i (ENumeric 1)))\n" + " (SReturn (ENumeric 1)))\n" + " (SCatch Exception e (SBlock (SReturn (ENumeric 2))))))", @@ -870,7 +870,7 @@ public class NodeToStringTests extends ESTestCase { + " return 2\n" + "}"); assertToString( - "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n" + "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n" + " (SCatch Exception e (SBlock\n" + " (SDeclBlock (SDeclaration int i (ENumeric 1)))\n" + " (SReturn (ENumeric 2))))))", @@ -881,7 +881,7 @@ public class NodeToStringTests extends ESTestCase { + " return 2\n" + "}"); assertToString( - "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n" + "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n" + " (SCatch NullPointerException e (SBlock (SReturn (ENumeric 2))))\n" + " (SCatch Exception e (SBlock (SReturn (ENumeric 3))))))", "try {\n" @@ -905,7 +905,7 @@ public class NodeToStringTests extends ESTestCase { assertEquals(expected, walk(code).toString()); } - private SSource walk(String code) { + private SClass walk(String code) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, PainlessTestScript.class); CompilerSettings compilerSettings = new CompilerSettings(); compilerSettings.setRegexesEnabled(true); From 00f2e7f6270d1c8321f0cb234cb47b858cdbe39d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 24 Sep 2019 17:15:11 +0200 Subject: [PATCH 21/94] Update AWS SDK for repository-s3 plugin to support IAM Roles for Service Accounts (#46969) (#47004) * Update AWS SDK for repository-s3 and discovery-ec2 plugins --- plugins/discovery-ec2/build.gradle | 2 +- .../discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 | 1 - .../discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 | 1 + .../discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 | 1 - .../discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 | 1 + plugins/repository-s3/build.gradle | 2 +- .../repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 | 1 - .../repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 | 1 + .../repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 | 1 - .../repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 | 1 + plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 | 1 - plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 | 1 + .../snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 | 1 - .../snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 | 1 + x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 | 1 - x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 | 1 + x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 | 1 - x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 | 1 + 18 files changed, 10 insertions(+), 10 deletions(-) delete mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 create mode 100644 plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 create mode 100644 
plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 create mode 100644 plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 delete mode 100644 x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 create mode 100644 x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 delete mode 100644 x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 create mode 100644 x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 delete mode 100644 x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 create mode 100644 x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index e7f027a7517..2dcee91c481 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'aws': '1.11.562' + 'aws': '1.11.636' ] dependencies { diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 deleted file mode 100644 index ed8ded6a360..00000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 new file mode 100644 index 00000000000..b9ee9c102db --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 @@ -0,0 +1 @@ +84c9f180f8f60f6f1433c9c5253fcb704593b121 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 deleted file mode 100644 index 040d28de70b..00000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0211a055fb3e036033af4b1ca25ada0574a756ec \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 new file mode 100644 index 00000000000..ed737c808c1 --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 @@ -0,0 +1 @@ +d32fc4ae314dbee9717302a3119cba0f735c04b1 \ No newline at end of file diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index ab4597cf7f4..2d7e60ae16e 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -29,7 +29,7 @@ esplugin { } versions << [ - 'aws': '1.11.562' + 'aws': '1.11.636' ] dependencies { diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 deleted file mode 100644 index ed8ded6a360..00000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 new file mode 100644 index 00000000000..b9ee9c102db --- /dev/null +++ b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 @@ -0,0 +1 @@ +84c9f180f8f60f6f1433c9c5253fcb704593b121 \ 
No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 deleted file mode 100644 index 8e852fe9b27..00000000000 --- a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1712c878f7e9483ceac1eb2356a9457a3c8df03e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 new file mode 100644 index 00000000000..1e05e98d240 --- /dev/null +++ b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 @@ -0,0 +1 @@ +f86fc1993ac8122f6f02a8eb9b467b5f945cd76b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 deleted file mode 100644 index 8e2d0e1935a..00000000000 --- a/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1147ed0ad1f2c5a16b8271e38e3cda5cd488c8ae \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 b/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 new file mode 100644 index 00000000000..70c0d3633af --- /dev/null +++ b/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 @@ -0,0 +1 @@ +e468c349ce410171a1d5df7fa0fa377d52c5d651 \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 deleted file mode 100644 index ed8ded6a360..00000000000 --- a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 new file mode 100644 index 00000000000..b9ee9c102db --- /dev/null +++ b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 @@ -0,0 +1 @@ +84c9f180f8f60f6f1433c9c5253fcb704593b121 \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 deleted file mode 100644 index 8e852fe9b27..00000000000 --- a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1712c878f7e9483ceac1eb2356a9457a3c8df03e \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 new file mode 100644 index 00000000000..1e05e98d240 --- /dev/null +++ b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 @@ -0,0 +1 @@ +f86fc1993ac8122f6f02a8eb9b467b5f945cd76b \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 deleted file mode 100644 index 8e2d0e1935a..00000000000 --- a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1147ed0ad1f2c5a16b8271e38e3cda5cd488c8ae \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 new file mode 100644 index 00000000000..70c0d3633af --- /dev/null +++ 
b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 @@ -0,0 +1 @@ +e468c349ce410171a1d5df7fa0fa377d52c5d651 \ No newline at end of file From 64bf1b56fea7d59c4c217d917cfd0f58261f9c20 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Tue, 24 Sep 2019 19:04:52 +0300 Subject: [PATCH 22/94] [7.x] SQL: Mute pivot testAverageWithOneValueAndOrder and testSumWithoutSubquery (#47030) (#47033) Relates #47002 --- .../sql/qa/src/main/resources/pivot.csv-spec | 53 ++++++++++--------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec index c7e47a4304b..8858187d000 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec @@ -126,17 +126,18 @@ null |48396.28571428572|62140.666666666664 1 |49767.22222222222|47073.25 ; -averageWithOneValueAndOrder -schema::languages:bt|'F':d -SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; - - languages | 'F' ----------------+------------------ -5 |46705.555555555555 -4 |49291.5 -3 |53660.0 -2 |50684.4 -; +// AwaitsFix https://github.com/elastic/elasticsearch/issues/47002 +// averageWithOneValueAndOrder +// schema::languages:bt|'F':d +// SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; +// +// languages | 'F' +// ---------------+------------------ +// 5 |46705.555555555555 +// 4 |49291.5 +// 3 |53660.0 +// 2 |50684.4 +// ; averageWithTwoValuesAndOrderDesc schema::languages:bt|'M':d|'F':d @@ -170,26 +171,26 @@ SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary ---------------+-----------------+------------------ null |48396.28571428572|62140.666666666664 1 |49767.22222222222|47073.25 -2 |44103.90909090909|50684.4 +2 |44103.90909090909|50684.4 3 |51741.90909090909|53660.0 4 |47058.90909090909|49291.5 5 |39052.875 |46705.555555555555 ; - -sumWithoutSubquery -schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i -SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; - - birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 ----------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- -null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null -null |10043 |Yishay |M |1990-10-20 00:00:00.0|Tzvieli |34341 |null -null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null -1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null -1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 -1953-01-23 00:00:00.0|10019 |Lillian |null |1999-04-30 00:00:00.0|Haddadi |73717 |null -; +// AwaitsFix https://github.com/elastic/elasticsearch/issues/47002 +// sumWithoutSubquery +// schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i +// SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; +// +// birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 +// ---------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- +// null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null +// null |10043 |Yishay |M |1990-10-20 
00:00:00.0|Tzvieli |34341 |null +// null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null +// 1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null +// 1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 +// 1953-01-23 00:00:00.0|10019 |Lillian |null |1999-04-30 00:00:00.0|Haddadi |73717 |null +// ; averageWithOneValueAndMath schema::languages:bt|'F':d From 3a82e0f7f4173d8e58b9e921d34ee50788e173cb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 24 Sep 2019 19:07:55 +0300 Subject: [PATCH 23/94] Do not rewrite aliases on remove-index from aliases requests (#46989) (#47018) When we rewrite alias requests, after filtering down to only those that the user is authorized to see, it can be that there are no aliases remaining in the request. However, core Elasticsearch interprets this as _all, so the user would see more than they are authorized for. To address this, we previously rewrote all such requests to have aliases `"*"`, `"-*"`, which would be interpreted as none when aliases are resolved. Yet, this is only needed for get aliases requests, and we were applying it to all alias requests, including remove index requests. If such a request was sent to a coordinating node that is not the master node, the request would be rewritten to include `"*"` and `"-*"`, and the master would then attempt to authorize the user for these. If the user had limited permissions, the request would fail, even if they were authorized on the index targeted by the remove index action. This commit addresses this by rewriting for the get aliases and remove aliases request types but not for the remove index. Co-authored-by: Albert Zaharovits Co-authored-by: Tim Vernum --- .../authz/IndicesAndAliasesResolver.java | 14 +- .../security/authz/IndexAliasesTests.java | 152 ++++++++++++------ .../authz/IndicesAndAliasesResolverTests.java | 17 +- 3 files changed, 129 insertions(+), 54 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 6e0c2ed0bb1..bf5ced4e540 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -210,11 +210,15 @@ class IndicesAndAliasesResolver { } else { resolvedIndicesBuilder.addLocal(aliasesRequest.aliases()); } - // if no aliases are authorized, then fill in an expression that - // MetaData#findAliases evaluates to the empty alias list. You cannot put - // "nothing" (the empty list) explicitly because this is resolved by es core to - // _all - if (aliasesRequest.aliases().length == 0) { + /* + * If no aliases are authorized, then fill in an expression that MetaData#findAliases evaluates to an + * empty alias list. We cannot put an empty list here because core resolves this as _all. For other + * request types, this replacement is not needed and can trigger issues when we rewrite the request + * on the coordinating node. For example, for a remove index request, if we did this replacement, + * the request would be rewritten to include "*","-*" and for a user that does not have permissions + * on "*", the master node would not authorize the request. 
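+ * For that reason the rewrite below is applied only to alias requests that expand alias wildcards, + * i.e. the get aliases and remove alias request types, and deliberately not to remove index requests. 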
+ */ + if (aliasesRequest.expandAliasesWildcards() && aliasesRequest.aliases().length == 0) { aliasesRequest.replaceAliases(NO_INDICES_OR_ALIASES_ARRAY); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 711ca517d98..a6216ea2665 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -37,6 +37,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { ("test123".toCharArray()))); return super.configUsers() + "create_only:" + usersPasswdHashed + "\n" + + "all_on_test:" + usersPasswdHashed + "\n" + "create_test_aliases_test:" + usersPasswdHashed + "\n" + "create_test_aliases_alias:" + usersPasswdHashed + "\n" + "create_test_aliases_test_alias:" + usersPasswdHashed + "\n" + @@ -47,6 +48,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { protected String configUsersRoles() { return super.configUsersRoles() + "create_only:create_only\n" + + "all_on_test:all_on_test\n" + "create_test_aliases_test:create_test_aliases_test\n" + "create_test_aliases_alias:create_test_aliases_alias\n" + "create_test_aliases_test_alias:create_test_aliases_test_alias\n" + @@ -61,6 +63,10 @@ public class IndexAliasesTests extends SecurityIntegTestCase { " indices:\n" + " - names: '*'\n" + " privileges: [ create_index ]\n" + + "all_on_test:\n" + + " indices:\n" + + " - names: 'test_*'\n" + + " privileges: [ all ]\n" + //role that has create index and manage_aliases on test_*, not enough to manage_aliases aliases outside of test_* namespace "create_test_aliases_test:\n" + " indices:\n" + @@ -89,23 +95,23 @@ public class IndexAliasesTests extends SecurityIntegTestCase { @Before public void createBogusIndex() { - if (randomBoolean()) { - //randomly create an index with two aliases from user admin, to make sure it doesn't affect any of the test results - assertAcked(client().admin().indices().prepareCreate("index1").addAlias(new Alias("alias1")).addAlias(new Alias("alias2"))); - } + //randomly create an index with two aliases from user admin, to make sure it doesn't affect any of the test results + assertAcked(client().admin().indices().prepareCreate("bogus_index_1").addAlias(new Alias("bogus_alias_1")) + .addAlias(new Alias("bogus_alias_2"))); } public void testCreateIndexThenAliasesCreateOnlyPermission() { //user has create permission only: allows to create indices, manage_aliases is required to add/remove aliases Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").get()); + final Client client = client().filterWithHeader(headers); + assertAcked(client.admin().indices().prepareCreate("test_1").get()); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_1", "test_alias")::get, + client.admin().indices().prepareAliases().addAlias("test_1", "test_alias")::get, IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .addAlias("test_*", "test_alias")::get, 
IndicesAliasesAction.NAME, "create_only"); } @@ -116,7 +122,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { new SecureString("test123".toCharArray()))); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_2"))::get, + client(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_2"))::get, IndicesAliasesAction.NAME, "create_only"); } @@ -124,15 +130,16 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to add/remove aliases Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "alias_1")::get, + client.admin().indices().prepareAliases().removeAlias("test_1", "alias_1")::get, IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "alias_*")::get, IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "_all")::get, IndicesAliasesAction.NAME, "create_only"); } @@ -140,24 +147,25 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to retrieve aliases though Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_1") .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers) + assertThrowsAuthorizationException(client .admin().indices().prepareGetAliases("_all") .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + assertThrowsAuthorizationException(client.admin().indices() .prepareGetAliases().setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_alias") .setIndices("test_*").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. 
- assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases()::get, + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases()::get, GetAliasesAction.NAME, "create_only"); } @@ -165,23 +173,30 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to retrieve aliases though Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_1") .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("_all") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("_all") .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases().setIndices("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("alias*")::get, GetAliasesAction.NAME, "create_only"); + + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1") .setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + client.admin().indices().prepareGetAliases("test_alias") + .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); + + assertThrowsAuthorizationException( + client.admin().indices().prepareGetAliases() .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. 
- assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + assertThrowsAuthorizationException(client.admin().indices() .prepareGetAliases().setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); } @@ -190,17 +205,18 @@ public class IndexAliasesTests extends SecurityIntegTestCase { // indices Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").get()); + assertAcked(client.admin().indices().prepareCreate("test_1").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_1", "test_alias").get()); + assertAcked(client.admin().indices().prepareAliases().addAlias("test_1", "test_alias").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_*", "test_alias_2").get()); + assertAcked(client.admin().indices().prepareAliases().addAlias("test_*", "test_alias_2").get()); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .addAlias("test_1", "alias_1").addAlias("test_1", "test_alias")::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); } @@ -211,10 +227,12 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); + final Client client = client(headers); + + assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareCreate("test_2") + assertThrowsAuthorizationException(client.admin().indices().prepareCreate("test_2") .addAlias(new Alias("test_alias")).addAlias(new Alias("alias_2"))::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); } @@ -225,38 +243,49 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); + final Client client = client(headers); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1")) + assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1")) .addAlias(new Alias("test_alias_2")) .addAlias(new Alias("test_alias_3")).addAlias(new Alias("test_alias_4")).get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_1").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_1").get()); //ok: user has manage_aliases on 
test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_*", "test_alias_2").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_*", "test_alias_2").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); { //fails: all aliases have been deleted, no existing aliases match test_alias_* AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); + client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); assertThat(exception.getMessage(), equalTo("aliases [test_alias_*] missing")); } { //fails: all aliases have been deleted, no existing aliases match _all AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); + client.admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); } + // add unauthorized aliases + if (randomBoolean()) { + assertAcked(client().admin().indices().prepareAliases().addAlias("test_1", "alias_1").get()); + } + assertAcked(client().admin().indices().prepareAliases().addAlias("test_1", "alias_2").get()); + //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "alias_1")::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", new String[]{"_all", "alias_1"})::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); + + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, + client.admin().indices().prepareAliases().removeAlias("test_1", "*")::get); + assertThat(exception.getMessage(), equalTo("aliases [*] missing")); } public void testGetAliasesCreateAndAliasesPermission() { @@ -264,7 +293,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { // indices Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); //ok: user has manage_aliases on test_* @@ -315,7 +344,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexThenAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* 
and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -339,7 +368,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -352,7 +381,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testDeleteAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -368,7 +397,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testGetAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to retrieve aliases // on both aliases and indices @@ -413,7 +442,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexThenAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. assertAcked(client.admin().indices().prepareCreate("test_1")); @@ -428,7 +457,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexAndAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias"))); @@ -439,7 +468,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testDeleteAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. 
assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).addAlias(new Alias("alias_1"))
@@ -462,7 +491,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
 public void testGetAliasesCreateAndAliasesPermission3() {
 Map headers = Collections.singletonMap(BASIC_AUTH_HEADER,
 basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray())));
- final Client client = client().filterWithHeader(headers);
+ final Client client = client(headers);
 //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good.
 assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).addAlias(new Alias("alias_1")));
@@ -503,7 +532,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
 public void testGetAliasesAliasesOnlyPermissionStrict() {
 Map headers = Collections.singletonMap(BASIC_AUTH_HEADER,
 basicAuthHeaderValue("aliases_only", new SecureString("test123".toCharArray())));
- final Client client = client().filterWithHeader(headers);
+ final Client client = client(headers);
 //user has manage_aliases only permissions on both alias_* and test_*
 //security plugin lets it through, but es core intercepts it due to strict indices options and throws index not found
@@ -523,7 +552,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
 public void testGetAliasesAliasesOnlyPermissionIgnoreUnavailable() {
 Map headers = Collections.singletonMap(BASIC_AUTH_HEADER,
 basicAuthHeaderValue("aliases_only", new SecureString("test123".toCharArray())));
- final Client client = client().filterWithHeader(headers);
+ final Client client = client(headers);
 //user has manage_aliases only permissions on both alias_* and test_*
 //ok: manage_aliases on both test_* and alias_*
@@ -542,6 +571,37 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
 assertEquals(0, getAliasesResponse.getAliases().size());
 }
+ public void testRemoveIndex() {
+ final Map headers = Collections.singletonMap(
+ BASIC_AUTH_HEADER,
+ basicAuthHeaderValue("all_on_test", new SecureString("test123".toCharArray())));
+ final Client client = client(headers);
+
+ assertAcked(client.admin().indices().prepareCreate("test_delete_1").get());
+ assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1")));
+
+ assertAcked(client.admin().indices().prepareAliases().removeIndex("test_delete_*").get());
+ assertAliases(client.admin().indices().prepareGetAliases().setAliases("*"), "test_1", "test_alias_1");
+
+ // test that the remove index wildcard expands only to authorized indices
+ assertAcked(client.admin().indices().prepareAliases().removeIndex("*").get());
+ GetAliasesResponse getAliasesResponse = client.admin().indices().prepareGetAliases().setAliases("*").get();
+ assertThat(getAliasesResponse.getAliases().size(), equalTo(0));
+ assertAliases(client().admin().indices().prepareGetAliases().setAliases("*"), "bogus_index_1", "bogus_alias_1", "bogus_alias_2");
+ }
+
+ private static Client client(final Map headers) {
+ // it should not matter what client we send the request to, but let's pin all requests to a specific node
+ final Client client;
+ if (internalCluster().numDataAndMasterNodes() == 1 || randomBoolean()) {
+ client = client(internalCluster().getMasterName()).filterWithHeader(headers);
+ } else {
+ client = client(randomValueOtherThan(internalCluster().getMasterName(), () -> randomFrom(internalCluster().getNodeNames())))
+
.filterWithHeader(headers); + } + return client; + } + private static void assertAliases(GetAliasesRequestBuilder getAliasesRequestBuilder, String index, String... aliases) { GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.get(); assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 665b70a8881..72e54ecf7a9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -83,6 +83,7 @@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -777,14 +778,24 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { assertThat(request.getAliasActions().get(1).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo", "explicit")); } - public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() { + public void testResolveAliasesWildcardsIndicesAliasesRequestRemoveAliasActionsNoAuthorizedIndices() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foo*")); - //no authorized aliases match bar*, hence aliases are replaced with no-aliases-expression for that action request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*")); resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); - assertThat(request.getAliasActions().get(1).aliases(), arrayContaining(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_ARRAY)); + assertThat(request.getAliasActions().get(1).aliases(), arrayContaining("*", "-*")); + } + + public void testResolveAliasesWildcardsIndicesAliasesRequestRemoveIndexActions() { + IndicesAliasesRequest request = new IndicesAliasesRequest(); + request.addAliasAction(AliasActions.removeIndex().index("foo*")); + request.addAliasAction(AliasActions.removeIndex().index("*bar")); + resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); + assertThat(request.getAliasActions().get(0).indices(), arrayContainingInAnyOrder("foofoo")); + assertThat(request.getAliasActions().get(0).aliases(), emptyArray()); + assertThat(request.getAliasActions().get(1).indices(), arrayContainingInAnyOrder("bar")); + assertThat(request.getAliasActions().get(1).aliases(), emptyArray()); } public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { From 22dade8e1b2b784a415fe0f87a58fda28bb96d59 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 24 Sep 2019 17:20:25 +0100 Subject: [PATCH 24/94] [DOCS] Add 7.4 breaking changes for transforms and data frame analytics (#46821) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The PRs that made these changes are: - 
https://github.com/elastic/elasticsearch/pull/44350 - https://github.com/elastic/elasticsearch/pull/45276 - https://github.com/elastic/elasticsearch/pull/45856 Co-Authored-By: István Zoltán Szabó Co-Authored-By: Lisa Cawley --- docs/reference/migration/migrate_7_4.asciidoc | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/docs/reference/migration/migrate_7_4.asciidoc b/docs/reference/migration/migrate_7_4.asciidoc index cc54dd2fb7c..f36382c3e48 100644 --- a/docs/reference/migration/migrate_7_4.asciidoc +++ b/docs/reference/migration/migrate_7_4.asciidoc @@ -139,3 +139,65 @@ deprecated, and will be removed in version 8.0.0. Instead, use `node.pidfile`. To ensure that all settings are in a proper namespace, the `processors` setting is deprecated, and will be removed in version 8.0.0. Instead, use `node.processors`. + +[discrete] +[[breaking_74_transform_changes]] +=== {transform-cap} changes + +[discrete] +[[transform_stats_format]] +==== Stats response format changes + +The response format of the <> is very different +to previous versions: + +- `task_state` and `indexer_state` are combined into a single `state` field + that replaces the old `state` object. +- Within the `checkpointing` object, `current` is renamed to `last` and + `in_progress` to `next`. +- The `checkpoint` number is now nested under `last` and `next`. +- `checkpoint_progress` is now reported in an object nested in the `next` + checkpoint object. (If there is no `next` checkpoint then no checkpoint is + in progress and by definition the `last` checkpoint is 100% complete.) + +For an example of the new format see <>. + +[discrete] +[[breaking_74_df_analytics_changes]] +=== {dfanalytics-cap} changes + +[discrete] +[[progress_reporting_change]] +==== Changes to progress reporting + +The single integer `progress_percent` field at the top level of the +{dfanalytics-job} stats is replaced by a `progress` field that is an array +of objects. Each object contains the `phase` name and `progress_percent` of one +phase of the analytics. For example: + +[source,js] +---- +{ + "id" : "my_job", + "state" : "analyzing", + "progress" : [ + { + "phase" : "reindexing", + "progress_percent" : 100 + }, + { + "phase" : "loading_data", + "progress_percent" : 100 + }, + { + "phase" : "analyzing", + "progress_percent" : 47 + }, + { + "phase" : "writing_results", + "progress_percent" : 0 + } + ] +} +---- +// NOTCONSOLE From f02582de4b03a255c16a28fd785494761a993dba Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 20 Sep 2019 11:09:36 -0600 Subject: [PATCH 25/94] Reduce a bind failure to trace logging (#46891) Due to recent changes in the nio transport, a failure to bind the server channel has started to be logged at an error level. This exception leads to an automatic retry on a different port, so it should only be logged at a trace level. 
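To make the intended pattern concrete before the diff: the transport now treats a BindException as an expected, recoverable event (the node simply probes the next port in its configured range) and demotes it to trace, while every other server-channel failure stays at error. The sketch below is a minimal, runnable illustration of that exception-type dispatch and is not the actual Elasticsearch code: it uses java.util.logging instead of Log4j so it has no external dependencies, and ServerChannel is a hypothetical stand-in for the real channel type.

    import java.net.BindException;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public final class ServerExceptionLogging {
        private static final Logger logger = Logger.getLogger(ServerExceptionLogging.class.getName());

        // Hypothetical stand-in for the transport's server channel handle.
        record ServerChannel(String profile, int port) { }

        static void onServerException(ServerChannel channel, Exception e) {
            if (e instanceof BindException) {
                // Expected during port probing: the caller retries the next port, so keep this quiet.
                logger.log(Level.FINEST, e, () -> "bind exception from server channel caught on transport layer [" + channel + "]");
            } else {
                // Anything else is a genuine failure and must stay loud.
                logger.log(Level.SEVERE, "exception from server channel caught on transport layer [" + channel + "]", e);
            }
        }

        public static void main(String[] args) {
            ServerChannel channel = new ServerChannel("default", 9300);
            onServerException(channel, new BindException("Address already in use")); // logged quietly
            onServerException(channel, new IllegalStateException("boom"));           // logged loudly
        }
    }

Demoting rather than swallowing the exception keeps the detail available at trace level when a node fails to bind every port in its range. The companion change in ServerChannelContext below wraps the underlying I/O error in a BindException via initCause, so a type check like this still fires once the exception crosses the channel-context boundary.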
--- .../java/org/elasticsearch/nio/ServerChannelContext.java | 4 +++- .../main/java/org/elasticsearch/transport/TcpTransport.java | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/ServerChannelContext.java b/libs/nio/src/main/java/org/elasticsearch/nio/ServerChannelContext.java index ec637a3b046..e598ec0929e 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/ServerChannelContext.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/ServerChannelContext.java @@ -22,6 +22,7 @@ package org.elasticsearch.nio; import org.elasticsearch.common.concurrent.CompletableContext; import java.io.IOException; +import java.net.BindException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.nio.channels.ServerSocketChannel; @@ -78,7 +79,8 @@ public class ServerChannelContext extends ChannelContext { rawChannel.bind(localAddress); bindContext.complete(null); } catch (IOException e) { - IOException exception = new IOException("Failed to bind server socket channel {localAddress=" + localAddress + "}.", e); + BindException exception = new BindException("Failed to bind server socket channel {localAddress=" + localAddress + "}."); + exception.initCause(e); bindContext.completeExceptionally(exception); throw exception; } diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index f0037bed5d0..170444c205f 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -618,7 +618,11 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements } protected void onServerException(TcpServerChannel channel, Exception e) { - logger.error(new ParameterizedMessage("exception from server channel caught on transport layer [channel={}]", channel), e); + if (e instanceof BindException) { + logger.trace(() -> new ParameterizedMessage("bind exception from server channel caught on transport layer [{}]", channel), e); + } else { + logger.error(new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + } } protected void serverAcceptedChannel(TcpChannel channel) { From ffae769186f3de39e35d4fef53c48a09997c9d2f Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 24 Sep 2019 17:38:09 +0100 Subject: [PATCH 26/94] [TEST] Rename data-frames to transform in docs test cleanup (#47027) The renaming of the tests in #46760 caused the cleanup between tests to be skipped. 
Backport of #47016 --- .../elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index ffdabd6d633..14fef433446 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -101,7 +101,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @After public void cleanup() throws Exception { - if (isMachineLearningTest() || isDataFrameTest()) { + if (isMachineLearningTest() || isTransformTest()) { ESRestTestCase.waitForPendingTasks(adminClient()); } } @@ -111,9 +111,9 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { return testName != null && (testName.contains("/ml/") || testName.contains("\\ml\\")); } - protected boolean isDataFrameTest() { + protected boolean isTransformTest() { String testName = getTestName(); - return testName != null && (testName.contains("/data-frames/") || testName.contains("\\data-frames\\")); + return testName != null && (testName.contains("/transform/") || testName.contains("\\transform\\")); } /** From 6720c56bdd267d726a0dafe921a8df4b26990f60 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 29 Aug 2019 17:38:37 -0600 Subject: [PATCH 27/94] Set netty system properties in BuildPlugin (#45881) Currently in production instances of Elasticsearch we set a couple of system properties by default. We currently do not apply all of these system properties in tests. This commit applies these properties in the tests. --- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 6 ++++++ modules/transport-netty4/build.gradle | 6 ------ .../src/main/java/org/elasticsearch/test/ESTestCase.java | 3 --- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 595bd173730..29d6dcd08f5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -901,6 +901,12 @@ class BuildPlugin implements Plugin { // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0 test.systemProperty 'es.transport.cname_in_publish_address', 'true' + // Set netty system properties to the properties we configure in jvm.options + test.systemProperty('io.netty.noUnsafe', 'true') + test.systemProperty('io.netty.noKeySetOptimization', 'true') + test.systemProperty('io.netty.recycler.maxCapacityPerThread', '0') + test.systemProperty('io.netty.allocator.numDirectArenas', '0') + test.testLogging { TestLoggingContainer logging -> logging.showExceptions = true logging.showCauses = true diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 61a54fef8f4..627715c7866 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -54,9 +54,6 @@ test { * other if we allow them to set the number of available processors as it's set-once in Netty. 
*/ systemProperty 'es.set.netty.runtime.available.processors', 'false' - - // Disable direct buffer pooling as it is disabled by default in Elasticsearch - systemProperty 'io.netty.allocator.numDirectArenas', '0' } integTestRunner { @@ -65,9 +62,6 @@ integTestRunner { * other if we allow them to set the number of available processors as it's set-once in Netty. */ systemProperty 'es.set.netty.runtime.available.processors', 'false' - - // Disable direct buffer pooling as it is disabled by default in Elasticsearch - systemProperty 'io.netty.allocator.numDirectArenas', '0' } TaskProvider pooledTest = tasks.register("pooledTest", Test) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 72ee2c0aff0..b783e340bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -243,9 +243,6 @@ public abstract class ESTestCase extends LuceneTestCase { // Enable Netty leak detection and monitor logger for logged leak errors System.setProperty("io.netty.leakDetection.level", "paranoid"); - - // Disable direct buffer pooling - System.setProperty("io.netty.allocator.numDirectArenas", "0"); } protected final Logger logger = LogManager.getLogger(getClass()); From 00c1c0132b13451625d160272f41bf54ff7d00fe Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 24 Sep 2019 13:03:20 -0400 Subject: [PATCH 28/94] [ML] fix two datafeed flush lockup bugs (#46982) (#47024) * [ML] fix two flush lockup bugs * Addressing PR comments * moving debug logging line so it is only written on success --- .../autodetect/AutodetectCommunicator.java | 3 +- .../output/AutodetectResultProcessor.java | 20 ++++++++--- .../autodetect/output/FlushListener.java | 9 +++-- .../ShortCircuitingRenormalizer.java | 21 +++++++++++- .../AutodetectCommunicatorTests.java | 6 ++-- .../AutodetectResultProcessorTests.java | 6 ++-- .../autodetect/output/FlushListenerTests.java | 34 ++++++++++++++++++- 7 files changed, 84 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index 8d49a0bfdbc..7f28a0859c6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -284,7 +284,7 @@ public class AutodetectCommunicator implements Closeable { } @Nullable - FlushAcknowledgement waitFlushToCompletion(String flushId) throws InterruptedException { + FlushAcknowledgement waitFlushToCompletion(String flushId) throws Exception { LOGGER.debug("[{}] waiting for flush", job.getId()); FlushAcknowledgement flushAcknowledgement; @@ -300,6 +300,7 @@ public class AutodetectCommunicator implements Closeable { } if (processKilled == false) { + LOGGER.debug("[{}] Initial flush completed, waiting until renormalizer is idle.", job.getId()); // We also have to wait for the normalizer to become idle so that we block // clients from querying results in the middle of normalization. 
autodetectResultProcessor.waitUntilRenormalizerIsIdle(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java index 282dfa2c2f9..c9441e9f60c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java @@ -288,9 +288,21 @@ public class AutodetectResultProcessor { // Commit previous writes here, effectively continuing // the flush from the C++ autodetect process right // through to the data store - bulkResultsPersister.executeRequest(); - persister.commitResultWrites(jobId); - flushListener.acknowledgeFlush(flushAcknowledgement); + Exception exception = null; + try { + bulkResultsPersister.executeRequest(); + persister.commitResultWrites(jobId); + LOGGER.debug("[{}] Flush acknowledgement sent to listener for ID {}", jobId, flushAcknowledgement.getId()); + } catch (Exception e) { + LOGGER.error( + "[" + jobId + "] failed to bulk persist results and commit writes during flush acknowledgement for ID " + + flushAcknowledgement.getId(), + e); + exception = e; + throw e; + } finally { + flushListener.acknowledgeFlush(flushAcknowledgement, exception); + } // Interim results may have been produced by the flush, // which need to be // deleted when the next finalized results come through @@ -391,7 +403,7 @@ public class AutodetectResultProcessor { * @return The {@link FlushAcknowledgement} if the flush has completed or the parsing finished; {@code null} if the timeout expired */ @Nullable - public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws InterruptedException { + public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws Exception { return failed ? null : flushListener.waitForFlush(flushId, timeout); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java index 0028bfef928..2a349ce8aee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java @@ -28,25 +28,29 @@ class FlushListener { }); @Nullable - FlushAcknowledgement waitForFlush(String flushId, Duration timeout) throws InterruptedException { + FlushAcknowledgement waitForFlush(String flushId, Duration timeout) throws Exception { if (onClear.hasRun()) { return null; } FlushAcknowledgementHolder holder = awaitingFlushed.computeIfAbsent(flushId, (key) -> new FlushAcknowledgementHolder(flushId)); if (holder.latch.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) { + if (holder.flushException != null) { + throw holder.flushException; + } return holder.flushAcknowledgement; } return null; } - void acknowledgeFlush(FlushAcknowledgement flushAcknowledgement) { + void acknowledgeFlush(FlushAcknowledgement flushAcknowledgement, @Nullable Exception exception) { // acknowledgeFlush(...) could be called before waitForFlush(...) 
// a flush api call writes a flush command to the analytical process and then via a different thread the
 // result reader then reads whether the flush has been acked.
 String flushId = flushAcknowledgement.getId();
 FlushAcknowledgementHolder holder = awaitingFlushed.computeIfAbsent(flushId, (key) -> new FlushAcknowledgementHolder(flushId));
 holder.flushAcknowledgement = flushAcknowledgement;
+ holder.flushException = exception;
 holder.latch.countDown();
 }
@@ -62,6 +66,7 @@ class FlushListener {
 private final CountDownLatch latch;
 private volatile FlushAcknowledgement flushAcknowledgement;
+ private volatile Exception flushException;
 private FlushAcknowledgementHolder(String flushId) {
 this.flushAcknowledgement = new FlushAcknowledgement(flushId, null);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java
index 0bd5a11609d..519609d4a7e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java
@@ -122,7 +122,26 @@ public class ShortCircuitingRenormalizer implements Renormalizer {
 }
 private void forceFinishWork() {
- semaphore.release();
+ // We cannot allow new quantiles to be added while we are failing from a previous renormalization failure.
+ synchronized (quantilesDeque) {
+ // We discard all but the earliest quantiles, if they exist
+ QuantilesWithLatch earliestQuantileWithLatch = null;
+ for (QuantilesWithLatch quantilesWithLatch = quantilesDeque.pollFirst(); quantilesWithLatch != null;
+ quantilesWithLatch = quantilesDeque.pollFirst()) {
+ if (earliestQuantileWithLatch == null) {
+ earliestQuantileWithLatch = quantilesWithLatch;
+ }
+ // Count down all the latches as they no longer matter since we failed
+ quantilesWithLatch.latch.countDown();
+ }
+ // Keep the earliest quantile so that the next call to doRenormalizations() will include as much of the failed normalization
+ // Since this latch is already countedDown, there is no reason to put it in the `latchDeque` again + if (earliestQuantileWithLatch != null) { + quantilesDeque.addLast(earliestQuantileWithLatch); + } + semaphore.release(); + } } private void doRenormalizations() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index 9f6d5295073..4562779fc29 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -108,7 +108,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { verifyNoMoreInteractions(process); } - public void testFlushJob() throws IOException, InterruptedException { + public void testFlushJob() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor processor = mock(AutodetectResultProcessor.class); @@ -123,7 +123,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { } } - public void testWaitForFlushReturnsIfParserFails() throws IOException, InterruptedException { + public void testWaitForFlushReturnsIfParserFails() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor processor = mock(AutodetectResultProcessor.class); @@ -144,7 +144,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { assertEquals("[foo] Unexpected death of autodetect: Mock process is dead", holder[0].getMessage()); } - public void testFlushJob_givenFlushWaitReturnsTrueOnSecondCall() throws IOException, InterruptedException { + public void testFlushJob_givenFlushWaitReturnsTrueOnSecondCall() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor autodetectResultProcessor = Mockito.mock(AutodetectResultProcessor.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java index d76f87b5e54..66f145d405c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java @@ -220,7 +220,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { assertTrue(processorUnderTest.isDeleteInterimRequired()); verify(persister).bulkPersisterBuilder(JOB_ID); - verify(flushListener).acknowledgeFlush(flushAcknowledgement); + verify(flushListener).acknowledgeFlush(flushAcknowledgement, null); verify(persister).commitResultWrites(JOB_ID); verify(bulkBuilder).executeRequest(); } @@ -242,7 +242,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { inOrder.verify(persister).persistCategoryDefinition(categoryDefinition); inOrder.verify(bulkBuilder).executeRequest(); inOrder.verify(persister).commitResultWrites(JOB_ID); - 
inOrder.verify(flushListener).acknowledgeFlush(flushAcknowledgement); + inOrder.verify(flushListener).acknowledgeFlush(flushAcknowledgement, null); } public void testProcessResult_modelPlot() { @@ -397,7 +397,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { verify(persister, times(2)).persistModelSnapshot(any(), eq(WriteRequest.RefreshPolicy.IMMEDIATE)); } - public void testParsingErrorSetsFailed() throws InterruptedException { + public void testParsingErrorSetsFailed() throws Exception { @SuppressWarnings("unchecked") Iterator iterator = mock(Iterator.class); when(iterator.hasNext()).thenThrow(new ElasticsearchParseException("this test throws")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java index 3343882d581..fa506ced23d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java @@ -14,6 +14,7 @@ import java.util.Date; import java.util.List; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class FlushListenerTests extends ESTestCase { @@ -27,12 +28,14 @@ public class FlushListenerTests extends ESTestCase { flushAcknowledgementHolder.set(flushAcknowledgement); } catch (InterruptedException _ex) { Thread.currentThread().interrupt(); + } catch (Exception ex) { + fail("unexpected exception " + ex.getMessage()); } }).start(); assertBusy(() -> assertTrue(listener.awaitingFlushed.containsKey("_id"))); assertNull(flushAcknowledgementHolder.get()); FlushAcknowledgement flushAcknowledgement = new FlushAcknowledgement("_id", new Date(12345678L)); - listener.acknowledgeFlush(flushAcknowledgement); + listener.acknowledgeFlush(flushAcknowledgement, null); assertBusy(() -> assertNotNull(flushAcknowledgementHolder.get())); assertEquals(1, listener.awaitingFlushed.size()); @@ -40,6 +43,33 @@ public class FlushListenerTests extends ESTestCase { assertEquals(0, listener.awaitingFlushed.size()); } + public void testAcknowledgeFlushFailure() throws Exception { + FlushListener listener = new FlushListener(); + AtomicReference flushExceptionHolder = new AtomicReference<>(); + new Thread(() -> { + try { + listener.waitForFlush("_id", Duration.ofMillis(10000)); + fail("Expected exception to throw."); + } catch (InterruptedException _ex) { + Thread.currentThread().interrupt(); + } catch (Exception ex) { + flushExceptionHolder.set(ex); + } + }).start(); + assertBusy(() -> assertTrue(listener.awaitingFlushed.containsKey("_id"))); + assertNull(flushExceptionHolder.get()); + FlushAcknowledgement flushAcknowledgement = new FlushAcknowledgement("_id", new Date(12345678L)); + listener.acknowledgeFlush(flushAcknowledgement, new Exception("BOOM")); + assertBusy(() -> { + assertNotNull(flushExceptionHolder.get()); + assertThat(flushExceptionHolder.get().getMessage(), equalTo("BOOM")); + }); + assertEquals(1, listener.awaitingFlushed.size()); + + listener.clear("_id"); + assertEquals(0, listener.awaitingFlushed.size()); + } + public void testClear() throws Exception { FlushListener listener = new FlushListener(); @@ -55,6 +85,8 @@ public class FlushListenerTests extends ESTestCase { flushAcknowledgementHolder.set(flushAcknowledgement); } 
catch (InterruptedException _ex) { Thread.currentThread().interrupt(); + } catch (Exception ex) { + fail("unexpected exception " + ex.getMessage()); } }).start(); } From 71ec0707cfa196d09837b979e708f1c799ba1f2c Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 24 Sep 2019 10:55:49 -0600 Subject: [PATCH 29/94] Remove locking around connection attempts (#46845) Currently in the ConnectionManager we lock around the node id. This is odd because we key connections by the ephemeral id. Upon further investigation it appears to me that we do not need the locking. Using the concurrent map, we can ensure that only one connection attempt completes. There is a very small chance that a new connection attempt will proceed right as another connection attempt is completing. However, since the whole process is asynchronous and event oriented (lightweight), that does not seem to be an issue. --- .../util/concurrent/ListenableFuture.java | 13 +++ .../transport/ConnectionManager.java | 104 +++++++----------- .../transport/ConnectionManagerTests.java | 6 +- 3 files changed, 59 insertions(+), 64 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java index e3d9b356bcf..7874d58b1b6 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java @@ -43,12 +43,25 @@ public final class ListenableFuture extends BaseFuture implements ActionLi private volatile boolean done = false; private final List, ExecutorService>> listeners = new ArrayList<>(); + /** * Adds a listener to this future. If the future has not yet completed, the listener will be * notified of a response or exception in a runnable submitted to the ExecutorService provided. * If the future has completed, the listener will be notified immediately without forking to * a different thread. */ + public void addListener(ActionListener listener, ExecutorService executor) { + addListener(listener, executor, null); + } + + /** + * Adds a listener to this future. If the future has not yet completed, the listener will be + * notified of a response or exception in a runnable submitted to the ExecutorService provided. + * If the future has completed, the listener will be notified immediately without forking to + * a different thread. + * + * It will apply the provided ThreadContext (if not null) when executing the listening. + */ public void addListener(ActionListener listener, ExecutorService executor, ThreadContext threadContext) { if (done) { // run the callback directly, we don't hold the lock and don't need to fork! 
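The ConnectionManager rewrite below is easier to follow with the core idiom extracted: replace the per-node KeyedLock with a concurrent map of pending futures, let putIfAbsent elect exactly one winning connection attempt, and have every concurrent caller subscribe to the winner's future. The following is a deliberately simplified, synchronous sketch of that idiom under assumed names (SingleFlightConnector, opener); the real code is fully asynchronous and additionally handles ref-counting, close semantics, and connection listeners.

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.function.Function;

    public final class SingleFlightConnector<K, C> {

        private final ConcurrentMap<K, C> connected = new ConcurrentHashMap<>();
        private final ConcurrentMap<K, CompletableFuture<C>> pending = new ConcurrentHashMap<>();

        public CompletableFuture<C> connect(K node, Function<K, C> opener) {
            C existing = connected.get(node);
            if (existing != null) {
                return CompletableFuture.completedFuture(existing); // already connected
            }
            CompletableFuture<C> ours = new CompletableFuture<>();
            CompletableFuture<C> theirs = pending.putIfAbsent(node, ours);
            if (theirs != null) {
                // Another thread is already connecting to this node: piggyback on its attempt.
                return theirs;
            }
            try {
                C connection = opener.apply(node);
                // A connection may have appeared concurrently; keep whichever registered first.
                C previous = connected.putIfAbsent(node, connection);
                ours.complete(previous != null ? previous : connection);
            } catch (RuntimeException e) {
                ours.completeExceptionally(e);
            } finally {
                pending.remove(node, ours); // remove only our own pending entry
            }
            return ours;
        }
    }

This tolerates the small race the commit message describes: a fresh attempt can start just as a previous one completes, but the redundant connection is then caught by putIfAbsent on the connected map, and the real patch closes it explicitly rather than leaking it.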
diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java index f8db0d96c54..03a9c0f40f3 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java @@ -20,22 +20,19 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.internal.io.IOUtils; import java.io.Closeable; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; @@ -53,8 +50,7 @@ public class ConnectionManager implements Closeable { private static final Logger logger = LogManager.getLogger(ConnectionManager.class); private final ConcurrentMap connectedNodes = ConcurrentCollections.newConcurrentMap(); - private final KeyedLock connectionLock = new KeyedLock<>(); // protects concurrent access to connectingNodes - private final Map>> connectingNodes = ConcurrentCollections.newConcurrentMap(); + private final ConcurrentMap> pendingConnections = ConcurrentCollections.newConcurrentMap(); private final AbstractRefCounted connectingRefCounter = new AbstractRefCounted("connection manager") { @Override protected void closeInternal() { @@ -122,40 +118,37 @@ public class ConnectionManager implements Closeable { return; } - try (Releasable lock = connectionLock.acquire(node.getId())) { - Transport.Connection connection = connectedNodes.get(node); - if (connection != null) { - assert connectingNodes.containsKey(node) == false; - lock.close(); - connectingRefCounter.decRef(); - listener.onResponse(null); - return; - } - - final List> connectionListeners = connectingNodes.computeIfAbsent(node, n -> new ArrayList<>()); - connectionListeners.add(listener); - if (connectionListeners.size() > 1) { - // wait on previous entry to complete connection attempt - connectingRefCounter.decRef(); - return; - } + if (connectedNodes.containsKey(node)) { + connectingRefCounter.decRef(); + listener.onResponse(null); + return; } - final RunOnce releaseOnce = new RunOnce(connectingRefCounter::decRef); + final ListenableFuture currentListener = new ListenableFuture<>(); + final ListenableFuture existingListener = pendingConnections.putIfAbsent(node, currentListener); + if (existingListener != null) { + try { + // wait on previous entry to complete connection attempt + existingListener.addListener(listener, EsExecutors.newDirectExecutorService()); + } finally { + connectingRefCounter.decRef(); + } + return; + } + currentListener.addListener(listener, EsExecutors.newDirectExecutorService()); + + final RunOnce releaseOnce = new RunOnce(connectingRefCounter::decRef); internalOpenConnection(node, 
resolvedProfile, ActionListener.wrap(conn -> { connectionValidator.validate(conn, resolvedProfile, ActionListener.wrap( ignored -> { assert Transports.assertNotTransportThread("connection validator success"); - boolean success = false; - List> listeners = null; try { - // we acquire a connection lock, so no way there is an existing connection - try (Releasable ignored2 = connectionLock.acquire(node.getId())) { - connectedNodes.put(node, conn); - if (logger.isDebugEnabled()) { - logger.debug("connected to node [{}]", node); - } + if (connectedNodes.putIfAbsent(node, conn) != null) { + logger.debug("existing connection to node [{}], closing new redundant connection", node); + IOUtils.closeWhileHandlingException(conn); + } else { + logger.debug("connected to node [{}]", node); try { connectionListener.onNodeConnected(node); } finally { @@ -166,45 +159,21 @@ public class ConnectionManager implements Closeable { connectionListener.onNodeDisconnected(node); })); } - if (conn.isClosed()) { - throw new NodeNotConnectedException(node, "connection concurrently closed"); - } - success = true; - listeners = connectingNodes.remove(node); } - } catch (ConnectTransportException e) { - throw e; - } catch (Exception e) { - throw new ConnectTransportException(node, "general node connection failure", e); } finally { - if (success == false) { // close the connection if there is a failure - logger.trace(() -> new ParameterizedMessage("failed to connect to [{}], cleaning dangling connections", node)); - IOUtils.closeWhileHandlingException(conn); - } else { - releaseOnce.run(); - ActionListener.onResponse(listeners, null); - } + ListenableFuture future = pendingConnections.remove(node); + assert future == currentListener : "Listener in pending map is different than the expected listener"; + releaseOnce.run(); + future.onResponse(null); } }, e -> { assert Transports.assertNotTransportThread("connection validator failure"); IOUtils.closeWhileHandlingException(conn); - final List> listeners; - try (Releasable ignored = connectionLock.acquire(node.getId())) { - listeners = connectingNodes.remove(node); - } - releaseOnce.run(); - ActionListener.onFailure(listeners, e); + failConnectionListeners(node, releaseOnce, e, currentListener); })); }, e -> { assert Transports.assertNotTransportThread("internalOpenConnection failure"); - final List> listeners; - try (Releasable ignored = connectionLock.acquire(node.getId())) { - listeners = connectingNodes.remove(node); - } - releaseOnce.run(); - if (listeners != null) { - ActionListener.onFailure(listeners, e); - } + failConnectionListeners(node, releaseOnce, e, currentListener); })); } @@ -296,6 +265,15 @@ public class ConnectionManager implements Closeable { })); } + private void failConnectionListeners(DiscoveryNode node, RunOnce releaseOnce, Exception e, ListenableFuture expectedListener) { + ListenableFuture future = pendingConnections.remove(node); + releaseOnce.run(); + if (future != null) { + assert future == expectedListener : "Listener in pending map is different than the expected listener"; + future.onFailure(e); + } + } + ConnectionProfile getConnectionProfile() { return defaultProfile; } diff --git a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java index 4677567ab5a..c31e6d3e7d9 100644 --- a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java @@ -151,10 
+151,11 @@ public class ConnectionManagerTests extends ESTestCase { } }; - CyclicBarrier barrier = new CyclicBarrier(11); List threads = new ArrayList<>(); AtomicInteger nodeConnectedCount = new AtomicInteger(); AtomicInteger nodeFailureCount = new AtomicInteger(); + + CyclicBarrier barrier = new CyclicBarrier(11); for (int i = 0; i < 10; i++) { Thread thread = new Thread(() -> { try { @@ -166,6 +167,9 @@ public class ConnectionManagerTests extends ESTestCase { connectionManager.connectToNode(node, connectionProfile, validator, ActionListener.wrap(c -> { nodeConnectedCount.incrementAndGet(); + if (connectionManager.nodeConnected(node) == false) { + throw new AssertionError("Expected node to be connected"); + } assert latch.getCount() == 1; latch.countDown(); }, e -> { From 1893f6adc290b4308b0bd0a3549ae514a4184071 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 24 Sep 2019 10:35:06 -0700 Subject: [PATCH 30/94] [DOCS] Merge monitoring sections in Elasticsearch book (#46885) --- docs/reference/index.asciidoc | 2 - .../collecting-monitoring-data.asciidoc | 5 +- docs/reference/monitoring/collectors.asciidoc | 2 +- .../monitoring/configuring-filebeat.asciidoc | 2 +- .../configuring-metricbeat.asciidoc | 2 +- .../configuring-monitoring.asciidoc | 23 ------ docs/reference/monitoring/exporters.asciidoc | 3 - .../monitoring/how-monitoring-works.asciidoc | 39 ++++++++++ .../reference/monitoring/http-export.asciidoc | 2 +- .../monitoring/images/architecture.png | Bin 0 -> 266678 bytes docs/reference/monitoring/index.asciidoc | 71 +++++++----------- docs/reference/monitoring/indices.asciidoc | 2 +- .../monitoring/local-export.asciidoc | 4 +- docs/reference/monitoring/overview.asciidoc | 39 ++++++++++ .../monitoring/pause-export.asciidoc | 2 +- docs/reference/redirects.asciidoc | 11 +++ 16 files changed, 126 insertions(+), 83 deletions(-) delete mode 100644 docs/reference/monitoring/configuring-monitoring.asciidoc create mode 100644 docs/reference/monitoring/how-monitoring-works.asciidoc create mode 100644 docs/reference/monitoring/images/architecture.png create mode 100644 docs/reference/monitoring/overview.asciidoc diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e0cbe106681..1967c043146 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -18,8 +18,6 @@ include::setup.asciidoc[] include::setup/setup-xes.asciidoc[] -include::monitoring/configuring-monitoring.asciidoc[] - include::setup/setup-xclient.asciidoc[] include::setup/bootstrap-checks-xes.asciidoc[] diff --git a/docs/reference/monitoring/collecting-monitoring-data.asciidoc b/docs/reference/monitoring/collecting-monitoring-data.asciidoc index 3d5a85b3012..a2c95014b33 100644 --- a/docs/reference/monitoring/collecting-monitoring-data.asciidoc +++ b/docs/reference/monitoring/collecting-monitoring-data.asciidoc @@ -1,10 +1,7 @@ [role="xpack"] [testenv="gold"] [[collecting-monitoring-data]] -=== Collecting monitoring data -++++ -Collecting monitoring data -++++ +== Collecting monitoring data If you enable the Elastic {monitor-features} in your cluster, you can optionally collect metrics about {es}. 
By default, monitoring is enabled but diff --git a/docs/reference/monitoring/collectors.asciidoc b/docs/reference/monitoring/collectors.asciidoc index 64d56e81cdc..568d21e8354 100644 --- a/docs/reference/monitoring/collectors.asciidoc +++ b/docs/reference/monitoring/collectors.asciidoc @@ -110,7 +110,7 @@ For more information about the configuration options for the collectors, see [float] [[es-monitoring-stack]] -=== Collecting data from across the Elastic Stack +==== Collecting data from across the Elastic Stack {monitoring} in {es} also receives monitoring data from other parts of the Elastic Stack. In this way, it serves as an unscheduled monitoring data diff --git a/docs/reference/monitoring/configuring-filebeat.asciidoc b/docs/reference/monitoring/configuring-filebeat.asciidoc index fd77dc860ce..b1e22d38f54 100644 --- a/docs/reference/monitoring/configuring-filebeat.asciidoc +++ b/docs/reference/monitoring/configuring-filebeat.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[configuring-filebeat]] -=== Collecting {es} log data with {filebeat} +== Collecting {es} log data with {filebeat} [subs="attributes"] ++++ diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc index 34f027b1553..ea3aecfac2a 100644 --- a/docs/reference/monitoring/configuring-metricbeat.asciidoc +++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="gold"] [[configuring-metricbeat]] -=== Collecting {es} monitoring data with {metricbeat} +== Collecting {es} monitoring data with {metricbeat} [subs="attributes"] ++++ diff --git a/docs/reference/monitoring/configuring-monitoring.asciidoc b/docs/reference/monitoring/configuring-monitoring.asciidoc deleted file mode 100644 index e129999e3a5..00000000000 --- a/docs/reference/monitoring/configuring-monitoring.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[role="xpack"] -[testenv="gold"] -[[configuring-monitoring]] -== Configuring monitoring in {es} -++++ -Configuring monitoring -++++ - -If you enable the Elastic {monitor-features} in your cluster, there are two -methods to collect metrics about {es}: - -* <> -* <> - -You can also <>. - -To learn about monitoring in general, see -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. - -include::collecting-monitoring-data.asciidoc[] -include::configuring-metricbeat.asciidoc[] -include::configuring-filebeat.asciidoc[] -include::indices.asciidoc[] \ No newline at end of file diff --git a/docs/reference/monitoring/exporters.asciidoc b/docs/reference/monitoring/exporters.asciidoc index fee09015dbb..742b24608f2 100644 --- a/docs/reference/monitoring/exporters.asciidoc +++ b/docs/reference/monitoring/exporters.asciidoc @@ -158,6 +158,3 @@ which is used to determine whether the resource should be replaced. The `version field value represents the latest version of {monitoring} that changed the resource. If a resource is edited by someone or something external to {monitoring}, those changes are lost the next time an automatic update occurs. 
- -include::local-export.asciidoc[] -include::http-export.asciidoc[] diff --git a/docs/reference/monitoring/how-monitoring-works.asciidoc b/docs/reference/monitoring/how-monitoring-works.asciidoc new file mode 100644 index 00000000000..283ed0412be --- /dev/null +++ b/docs/reference/monitoring/how-monitoring-works.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[testenv="basic"] +[[how-monitoring-works]] +== How monitoring works +++++ +How it works +++++ + +Each {es} node, {ls} node, {kib} instance, and Beat is considered unique in the +cluster based on its persistent UUID, which is written to the +<> directory when the node or instance starts. + +Monitoring documents are just ordinary JSON documents built by monitoring each +{stack} component at a specified collection interval. If you want to alter the +templates for these indices, see <>. + +Each component in the {stack} is responsible for monitoring itself and then +forwarding those documents to the production cluster for both routing and +indexing (storage). The routing and indexing processes in {es} are handled by +what are called <> and +<>. + +Alternatively, you can use {metricbeat} to collect monitoring data and ship it +directly to the monitoring cluster. + +To learn how to collect monitoring data, see: + +* <> +* <> +* {kibana-ref}/xpack-monitoring.html[Monitoring {kib}] +* {logstash-ref}/monitoring-logstash.html[Monitoring {ls}] +* Monitoring Beats: +** {auditbeat-ref}/monitoring.html[{auditbeat}] +** {filebeat-ref}/monitoring.html[{filebeat}] +** {functionbeat-ref}/monitoring.html[{functionbeat}] +** {heartbeat-ref}/monitoring.html[{heartbeat}] +** {metricbeat-ref}/monitoring.html[{metricbeat}] +** {packetbeat-ref}/monitoring.html[{packetbeat}] +** {winlogbeat-ref}/monitoring.html[{winlogbeat}] diff --git a/docs/reference/monitoring/http-export.asciidoc b/docs/reference/monitoring/http-export.asciidoc index a875e5a0169..eaca9904d04 100644 --- a/docs/reference/monitoring/http-export.asciidoc +++ b/docs/reference/monitoring/http-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[http-exporter]] -=== HTTP Exporters +=== HTTP exporters The `http` exporter is the preferred exporter in {monitoring} because it enables the use of a separate monitoring cluster. 
As a secondary benefit, it avoids
diff --git a/docs/reference/monitoring/images/architecture.png b/docs/reference/monitoring/images/architecture.png
new file mode 100644
index 0000000000000000000000000000000000000000..769618c0ccc6753f0c40aa812ff963b4d64e82a1
GIT binary patch
literal 266678
[base85-encoded binary payload for the new monitoring architecture diagram (266,678 bytes) omitted]
zI@DctFb+3cthcLQ1E_o~Nn$H$jcOy`GG@PcxX6O)1umh_&X(&cVj^cC^s;2#PZynQQxl{sl3=^Fe${`%a091jNelK}WZ*-lPw6YIoeq+13zpo|I8* zHJAEj`n^j7<#`U4x9rMA=8e`}+%PI<4O<)IzsOq253nNCAbwH>n`C)T!^88El&35q z&E+_{)(SWuFV-M#OBE#dlv+)&wHm9l|Dfd9I6RgJx)bq@Wp$$FBsP7T(&vdoEX{R< zQ4?RLf3D@Df+))?6Z=R@6n`2hu36DQKUYb&Sl&;ngTlA^82c-@`XTvd^NXr)!o{u1 zuJd*s{TT!rmWmrqtuEV1z)@T`#`Iz!!v$sDOx=uVWC04>7tXld4Z_vU6@|>atQJ^$ zlr|?N$NopmrQhwpPNDbtYrj`n_3hsQXv&&Wy%jhvxfg=5+&H%zgzZ|-Op33{%!P;i zV%q7yurWPC>9trIsA^;cI{1nG!8=(q3)ZA;!a%Ook~Ax$%%s0{B0fryX%%EDLi zNZz-`P@Bh1QAl3i6<5qr+A8=XSOjR)^j_ku67*~>lr8JEm5JIX8m~v%tg5wJ6)CE#n1h+-$Bw|2)#QrHc=P>s2Mmw&>k} zvJU3+a7Yq4hy4;?a#8ujCG2ICfREN%ABHp|iMf1?qyqQM*T8od3+?4MFE;V`w5N0I z8#7m!S_!-G^%H(K&9#o+joA6*&80RLo5+ykV+sBCy_(8`Oy3{dfvh%Lu3%vODD-Z> zmd$%#J_9BgIT!McxsN)MJ$mgoz#Ow(D+>zVWqI_%$Xv8HS>yOO-7NVA@tTs4=xz`S z9qAvb7h$aC11XyJlqkj|d`&aoE;ZVg=Bzjt45O`VeJGEC-a@cwmC5;3xK8b8{*3>3 zkkQ(2bQi~t`K(*T%(d?8#{k<-Y+^unw?#BJkBw#My(Jv&7c;k&a<0&JlLwkH_cg2- z4d~4e*;y#WoKDTK{ZX6_fo<6k{6HyXaJT0jQ<)|3!ktE4@ME(H^$Lc2mNL2f>+`bn zRjI_~hd9{-sMqSH`Mq>jlm2-SrHsMvwbq!)E>D^$F%gS0o+~hHh(@aY;W3M*0D9fn zk7jrs*iX80p4mUwe)rga!g!MuzUwiYVu(07IY|>e79wf>Sit68j2`(7CA=ajN@^^k z_G=C=yu_u(TxU*pRx3*Q70zCt40bJr-x(1H$yogC>?}(7golmkRG}gc_YUUq>EbHH zB_$IyAa9VnXpBbFMH~Dj>RXJL1rj-Fz7I_>LzLGBURFbicR93>w{_ssPxiL--WOdp zB$Bd}utI?F_z=I+?Q0_i4w!zzWOMnzTP)uLG3F%Z)ce#^N}@7geep7C?;a)Jp2iOJ zuoy&+Z0Z}>3L13U9nHXLZ72D;+i-DNJ)9gcht!1S#Q)-y!V^YoRvW&K% z_M9t;sqb4jx#i2kk*eQN%c+hB?GTS^ATLzfb}j2WR2RrmbPaxyIj|}VIxL~NS0Jfq z-KzqFTh29@fLdw1wdzdsyb;m3OhAqBCSODc5*C9vZSqNVXK z;RYtlI6n&`9k*qityGdTqht;GsyEbhg09}xG`#dJftGlA2f_nZds{JYeHj=9*U6r|h0X#cTef#`8;v6$ogu8tyE4P4P6g-fBKn09$?) z1pc02__lgVn0*Je(2oGOou`volq6~Ho!&@A%eZw=9lS*lkCI935`&f<>Cruoe6Gtk zjgOo?ksMvltR25WT~ zEtp*8ym2L+tE>wx*WKR9(9PHtS%i-QzjqIxVHg|JhYz;ot>!%Sm}_PxQYM$I)U}ih z;#l@qSC~*AAq(-*FadS%IFUU?tfG>yA$WuEdOY;CL;#0@vzNW_u>=~O*E339jhNX8+|K_5i8sa-IQ4MT0@{@uQqQ$o*j4GbZ@s^7Pwn% ze_X^Nj+hP4tPfz4Q3S`#guG%n2YFg1{@~Ds0s*m5Eb1FIz4;_nT6}-bMb8L7Bg>gGIy`j;R z3$~Z721R!$YkM!FC`g-TOgnVA&Q4|NN#-k2Q}U2fALvtm-zHRg6&Pa+i!5m#78Dj* zuE1F}T$qq|dDmf42=QswmX4atd|}pWPyzad54x{$u1^X*3qae7AcLU=!EcNBCl8fKBjG?1)I1)58J+{nEpl`kvUngNKCvBn{cBT3U(FXU2WMc z5e4*ePX0J%iB!{l)hto#kkhK+flaWmv}~gw!JR=qdp+&4tHozpsUVx8{;)}maQKfN zqQC;j4cUBHUcu;sQkK5namv4E5zdh(GJ!mm@uuV9_$?Ad0Y6zy1*sGhTFxccq+1 zo>X6v6n!+TgU*7D*#j@9@pZ}T4T_j>dN-VUz6Pv(!!yZ6!VHMTAm#Z6j*7`j-Mi9( zW}JEzwVIDnkWNYwEL0iZ{v=#*U2!i$2nzi1c~csBG|pI5K5jp3?q6l6VXUXQnV%13 zpcjpLwvsFb{UsVm)$UWzcD3tud*X->itW|p+`SXxX$3B|x;vVb^jh0fc8m%*Rt^zq_54 zby>n$0d?vEj1PZn%NnU7avhL`n45h+6I~d8YR^ceU3NvG7oseRnCRXQ8+(ld*4+0# zLQ)ZHI?rUjA3mI%oY3&m9kGG#h7w7_NJ&U8zthptp=`NR-8D!V{PrhA8BO!Wc68{+ za%R_)7ysgp8auLZU_G9Ir^5yEi`2f_j%m>pxkwoc{{_|Ze(-9mDp5G_a&&|~&-FZQ z1^yv;4%So{eJi-c-fYNS;kSbD8nyb0FtO=)GyBbe)8txL>fpOEP}^2gJGdZ12=n}5 z6}s}4-fE^q<9rHy&(OWE2{M7hqQxyoNCdD{ANq(n{RmYyM(eW;-{jz zw*G$JuW=!fu7SK}TQ64gRV8G3^n6)ISGO`C z$yjuIFcQWkdS^tJ*Fd5kL-b1(ffx5i$IQ%(UZOIi-&4_EFcUXn{5^4MC(#%qW-GxyX6d?2VP!gDg|7+=Z|H_*~5QR1B|^q57+3O#=72U#&eOQ*;0gd-iDmzPdc^04Q9zGe*R73En)Y-L$=mcYrF1x+_{ zX8O~O(GPl@Z4By@j{o2P1<;c>ii885xF~pPVne?_?+`aYI~)T+90z`6Js#~fo;eK& z{3~dCR@`{qUs&{2Kc|GcX;@)Y%hfSTmZ^6n-W* zXZIv~hrB{~cTpUvr;c7l<}~Z4sdS3BX|Dv;ejKHQgDFFwj|NR1zeStQCrT6&_MF*Y{FHb;hYPb}fE z%F*6Ob!FNP-e#-Mdpt}Qy{?Lq^0~~TjNtoNw(wFW)uPi!AbSI zAPK1i8TIt_rNMagG~NrF0xE3-v>lO2iSyjJ&gXvy+&W_K-cOdq2zB zqlNUn5=n|X*AgAp9Pn_+-$^B(Mu9fmukX+3GVH;4NxUZL^&~%k;HhOq<{WN<^VB22 zsdJ8@U=YoLU_fzy{yMQHTvgadc!(t$!Q=hthuzsrGHpz`T(^F>PyR;U*ZXYLFyH2E5_etMKGiiF`^cpP|qO-7K@p$N7roIQ|73g`i zTAWig(qn7Cn$?|YU>)ycBQ7t0LTF7J5`#lS850MJSB-=)At{pK6(kCJi{)8U$L(Pl 
zCuyRjR6`Lfm;^euGK|CW`5mBn`O98+1V0B!RU&bI{okLtB>LJh4bk9uCnLly1oud* z95%KqMW-%Tk#@-v1i&hmv6Z@EHW5rWiM>SulV=K|NN?pF`B8N@} zS9LOF{#up8byV`N_^8QFI_a6M7(TURme%h&Kh@P&ev&z2H#$5`>tW_xF zMo{7b*!Mh^14~$l{`CK$>not5TD!0*K}0}8kw#KL0cmEWq+0|*8l}5Ch7b_x66ur< zsi7G{>Fx&U9=c)vGv52%|GR$wzm|*T8eN=o-t)eDKl|DHc_s?hqLxp~@B3`Fqx=_H z0G+gvr=`wC;q!{PB^16)XHPhqKa)LtCxqI&)V@=@ZYm;p_2Ni@317vS1AN@ zF&~VPye%FCD7UVhux-P(OPT=q0Hi4;b9LCszA*~j(D5UDRh*4k7B_QcZ+A<+* z2AvMVo=UxzboJS;aNL_Rx<1V>ThW?hzTHW&-|2R*=~FT{(8YQzm&Cn%F&wrYN_JCr zy>mEu(n#Kq>O@HYit%atsxw>K3Ldun3*X(D>iD)!D;U7rK^JcJTxO)gDeUgy;B?%8 z6=h_CkBV))dS21NkPOS;#>!cX^cY)W!+GV65W{YUhaY;%=n zF<~2yBXA^7Ejn7u&CA;jH2-+ab!b$w2}S!OAlC-l8$(%%k83dSh@@7n7)RDs^^O4Y zwnm$0KkEL3-)$p$0yUPv!{i@7#M=n)9^=dw2L@tZwBUQ|5-T6^w&>>Frie01p z$9;k$)|Ag><`CiQU--qO_@w#;{hvPw8#IMbs#<34)-n2~_EIT;fnLtwb z&;tScMU?Nh)U>5;eHJ?#pTlrS*gBCkyH*PKfctm_MO;Y|HW1M)+zvFIopm9 zfJ|h^zb}9tdF%U2&$`Ymm-yivC|_@d-MLaAf^G{j6iAkJwSYZE9TzZ&V0?2-#`7bO zXpvj(!SR@?IeiXBqb|83CfGmNm&q^Yw?l@$?A<&9gfzdQl|r9w-6H?|f-3vN^YC#m zr&%(8ko&M9d<$hBUCI`VHkiG#n!l5r2FGBj`1)gl(^9JcFY23_vnTHlqHMD$BvTVK zJBOX}pUpLAVOu=LOz!jU3J#W7TDo7$@LbD&X=#TUwL@|Z7Zo?}vEr63Jxb@D*PlCW zw|n$%%U_HJzIwvS!0=6bTMC^!P?uccUQ$xh_RdUo>G@cxw-c#7_B*o9Ujaw5VfC7} zx;(d5m5?#GqdA4-apO14od_UhpwX57`0*nzHl&O7yVJIsR95}3$4`R{o@BP4r+{6i zku19F`fBx>~^MV#2?^#CDX4QJY z%mb;h*o2 z@~H4K<-VH$Cpzx)*Zw9okAj;MZ1XnEq#JKCwq6pVs-La9iL&{4V~N0e)K5;v5|Y9V zn=kBw%PAM;$YVwfonC!T*@}pY6-jplC5*X1pYnt)PC=b2W}UbD&a-SPz6JL&%5FQ>)LQB}G@f)6;2fIpqfG1H z{VeIPyWPhVmB2CnqZK=noSdn;6?=a;Swc}%n@}B1{#de12-*LYCqQ*V+p)2Bbf)V@ z0JC#+Q#pmu;9O`h7Nab^`sdP@l za$g2#kz|FVQES3DhyBCF04pBu8}fSa`|F6rYl8P-tfR_NJ2=f<$uu+i*Dwa29*wwI zOA`8HsrM1w(C+SO+RnJiPp4H!M@Nj;vlRI{RR7)m`&;thTPy8DD8l8bp4F4d#l>YP z(PBSU<&b1KRqp+!9q}1JrKy4L+e*OX8%p-W>G#(inYKh4ZpH18pP5#svBv7z6ZEN` zgco-r-wm6~Y};Wf8npcdBsgL)4zF-mO0Ejo#p~Yg;mS4jw`@=a5{ZOuDU%Y%VHHRS zq4Z&=V_szSJ$XDs^o>SHMvb0c1AkQabM`s9Gi~&ptAoSV_dZLZ3@14Rw`3iqGHW~& zq1`_+@K@6FB?vDh1*V4Sa|hEHj{*5XpbFUl;V9n#!%LOQ^Y6Is4duk`2ud>zdm`O| z0S#?S#qdLP&gX#RL`u~_3QT`>#ZVw`dSUVD#6IzVb?X1)$I)woq&*hr_=XawB4jS> zF=QasHVd@dd?nREa*Dj;-+{82g7bj~_a=KN+Zab>-f6|OTFx+8Cg4?ZocDfH)uJN! 
z*-2Y0y4Pm;PiYQ;FBzsqZu>8PU#WTJZqX-&@(+((pNS97;EUmHNIPVi-;yU#^3-gf zRl89w2CW6;|8U5xC6}2$&}m{)MGsC-H^lUF#x(RDPJf@egD;&q!L-T~Pw!(Arz>2O z=a4}pi2%m*9dix+*J#e) zvx@o;fLUzJmaqLfJhv_`XCfxt%7^jKqKhMMYggAOA60(nVH$zgj;pOVZG{|S(9>)Y z%uR)z29a4Xj_lb^EGuB$2j$KrGUT+Nvy1&;ksT89aarg;Yy9_Y}W zYW?ah7Nwm(;%LQszXIrAWKOUc`rv%_FIh;5v6y^P>vu$&Pn5f;6rNE8`s$rrpNyeo zK?GO464NhA2_3gSsY&_jT8tB!uA^ITJ}%X+LMi*H{ zhLMkhVqV-%K&weotnk4RG-xVW2qn7c3Q9~;o__gU^mJi&E?Tk!xALm4*K+$8#r_H> z4BOGe?cDRdD8|l!6o+E~TUAvxc4c*SE^M)P4lIyQ0U) z>Gx?74#}y6REF)CH&*nwPVK{26So#&I|BkH11eXm!E;J7Iy(Ls{x8t2?-c*P%^5fd zk(2(kToN5&Ef|tt0Db7@=4QFOLdL9`V~D|=v)qAUUEc2A_a1ZB)hG1cqZlD%cGw;v zFZSef8iTd76j5w`gyjub3!tT}yNU;2YFl5~sz{I1e+xCsPL$Rw`pavIVLw>Ov(+2u znSOw*&{3D!G+nHbuYs|nd7}gY50qbYQmdm06}D@iE1^&sZSA6)jb)D=hLA_E;DD%= zn+m-U^|K4#Xe>jUK+;1ZZoH+C-D2rxK;eSk)+iKHnOCd3I#>6;87`JeV$jqfaFfc1 z*z`C0nmLhVQAJ8Zd!t;xa8aaw0)BMN>4cUS`uZ8|ty7#YBdu=uHF_bDilNnq&lLcP zqq-gTR`qm!C*PqzMu{>~EGVM;TQ>{fi^YrQH1g{m?%$E+-a77c=K7tn|ehiKQlV@`=2Gqbi9wbzXZYaxy>5t(_zrO?=z zF&!<|_k`b+e1#(#p++NPE9Qcgc{{ea0LOS8(@3@@7%z6(SAD%Pci;a0F&H;Be}18$ z8&Si6$%?JI4zaUrD3X=`QYDjL&MDM2&x==#FwrCy@Ytg@gy3~A*Qb3vfK5L}gmDk~ zWPPyJ*^Bx2V*PbcwUNNSFhPDQN{AkOOoPTLAfWMuN|X>^xwZbLl=MMkc@9 zt*V#kt)_=eDqfI&=O$-M#LY0Cn1vGjDsMDW=4I%oVO4P#%vma|;Nap0{Z|*qr~Tu< z2|PzMN@?jUfVPrPK|P&wS+Ifuy}AmQok?ZT!>~I^wRHi4m5cn&_)?CsaHXg$G3oGl_gT z*m#D0Sc7+%)2uxs&OTfcC=7!aDF(6b-*(_1jqo$12E+N7z=E&b|xmgvezf z-dZz#{1`&k;wBi10MYYcnPun7N^$Ey^B^Y=wRZeGV6$xdLiZVx-s4!tU?VTZavwLn zL^iMwfq|TMZ-8DfRl@+l$~VV$EZ%G}1ye1Jw-#_W^R$MEKbD-eR#3gj>%?^0LEx_=BDZC!lhn=N0|(Y8}G7;{E8( zQgJZZ4c0(GZtg%qz&u1jhf+<>KD63`Tk`)?!y@(K&%RiIAM?BdSTq^yJ+!G> z19A$Knc~(~zr$cC#g;W-EL<|qBDR8FPj21={bo$8H&9k)1K3+!$k1HQfgMk+tgH^(sBd3;1Le_-jn}r;?pf1W<7tjU zl5Eg;QvR9AfbMs|nr8syx$;$YLgW-{=r(83qtQkrS6F~q`O9-HTBK2d(S=-*pWpSN z1*zV9zu+bpO39^Dp2%fvC+Bw=du)Xng<`KbO85+TMC5F$D^HGPNhnaaJU8CtqO@lT zV2>`s0~}g|;uiu;A*Jd|1i@)5N917;9qb&%MMH7KHj7WbEFQH}o?g$a+i%yPKXT}A zXeNI!7eSVe=Ic|WUHi(p`5b1v%i_V=jzE&AO|LuMvYVziZq>!0E#e}lI9&1volGMX zvnU+*^L?d_7^sR$hxcFOloK(aN9D$tIciTd0Q^P7&QwLmiuz!7+>LUDx(!wuc>{H^ z@NC&e?S`{$O3PJSN(5Dibx@)3`K;+W)m-OOE#nMuan*UcrJ;Vj&}#ba+cybd-Qit8ze$dK;mN7T z{1KH$TOcmb=jDo9=RSljz53?0xJUpT5LPyAzcmO`=FSxz;^b-$cdAC8qn$sZ+ESB! zd%WoTcq#j{v;w;#8QGBj2}>n?BM}3W+_~8q1D+?p+CtA7!zN-*h~|_N3KiZhGt?rzu(4mru5~>O6Y!VC)*^843PxI{lx&FC$$#Ym~!u zUYK9wWBN;9Cb4Z3)YckwJTjS~r)T0mL`bW|96b$1C_UMPGk5Zeq|`1v;FP04`QCF( z^0<_nc^{QNNtJTrn;rH@D)ISXh8QriGQV-R#w*Y#K*vjGg3w8RgCJqxn#aPZ#X5nI5wcG>r?oVV?e1oPG0F5Up2 zRcDP2InX7$*v^u6TNIrkhajrDzcNtZ^|&-0QCdizY~(S5$;ZA{ z*|wlfL9Jpery7Q)YQ1KR+PYoq?^OXn(Avz2%6|=}qRwoh+17qu%19eGpI{|{>;0Wb zhNv=INfa-S!a(vzp1;@#?_}4;`vG2S2e+58yI*@;TqM_wGf2D?EPzD&#ReF&Pvm%) zf-(-CKfv<%xq?qh$7ZZL-eyRtdYM)anD9Xpq z1_(%=nw(7Mhh0;{lMjiaWjnN7cqjZFa#sKs^)i#A#{#_nml``C3=%Sgtv- ztpA#7?$t%&ke0?^GBEmIHVR{f>RqQ+S}8z7?R2S&TW@H}T$rFBZqAibY6ZetnV_(V z|1d>9X2c!WQvH*TMD|lPv^@DCl??Z;eXKh&OmgX9!RN5vS-TEOXGO*Egm}q5bJFrE z+s@DPNl{RDonviP5AdaKwf_LV5LGdF_@c$he+AG&VlaMb)F$rZo{L{ppNcDp*%dko`AuF7}Me9QS7wQ=jG zmE$CeDLWCHmZ>*+VoUlu%X>64K9q-mVYLat1X!nua)m>KIqLy%yIqFN@8PdIT(-Yu0%%4h{ zXOW~09Y9~y%uDa$3JM+HEb?Yv5l#7(Q999@%?x?XnUh|7H|*794!iKK)PNpyv=kt4mD%k8=69uwz2{U0AR;OmBEZ#b~VH zVYy1oZHb1NQqI)KueI_Hv8wSbV4^4Uo6J_-^pHGE^3-rTY13j&0X*VJcD%lYqcfD-H$9&msdVoHroOmyGe+2OPoX1U7RBaaKkS-gmX(JMJy&SC5# z;zmj?gj34~0zj}7K{S#mE; z&LSCQ>Agc$Cuq-d2AReKa#p6+j7(qg7?nZvB;-|ne__+V5`(-nLOrZ(I0&<;bYdU? 
z#{FvzTb=q-h1QWyV|Y{`mUy8h!sYl7{vCGL#@BXg@QD2Rq8D0ck%lz>7sDW<+ZS#q73_WEWiS#98XAWDRc=h7^ zz$NCvUB0__@7-fTqov7POli^58klZ#KPfjae5+L~thLQnRcvK-HKJ!xT-C7vg}$}0 zDwZnlV4KfGL$ePce}(pO_KXLy?iMqp_+_q-kv`J0X@spF3#M0MoXiTXI~EO@n41 z3WxocWy^jDoDsH+3^BJ)bVz!H0T*pH2^dUgEF_?^y z`@)7_`mT1m0nzd%rrJ6yzn}mLBk}d|r0bq=H3?&tMvoLB`YEAWEw;ld*|<6h}X~ln1Uc@^>3D8-5fOJavGosYc$* zGtprQ$Mmfv7?c@v6xjLR)(a5WG;rSiO3)-q{#>iZAU;1n z8FBdVX-Md`i;*$$g*7!dM+b$VSAoF6$K~EoU>6C3Qwcq+elt#o(9F!oA(@ zMC|N%bJ{K6b(k%y=<8xidn~Z@DL9Ha$yCKk>#f2{*j&I`@f?`pT+f&VsF+B&ZB)ug zU_+DfiiX;(%wgOQ2RnCejfa~>!;3VZ;y(f|lP-5BoPX)A!{ugf5VCK_UUSqBuCLq` z58LcSJSR75U3gd+D#U9u&te8KBy_px>+Jk4h{DLFB6%w}ohH^!ZE4(>Y#knr4arqV z>OIq^RUM~89)bUQ6FwsGKru6j+6_pjxZIDQav2>G822X|^xmNsw(-W|jZm9=qe0-^ z=o;iEX1`n--Rx@D7)l)XhVmlKIz;jAEWPh{E1tK;Z{6~zxQm9F^YTB1_K?2XXPsl? zUH5PHe&l-oE9mSZ-t-oFJqQkYl*B+?!VwSv4x0~H1T1}o46f=Rh+E_7Si0UF1LMT3 zh(y_ah%iKPQt;n*iGt=O1JwJ(CD8EefzD120Ri;h%6ygH1e0FAkz6IaHV`KzY16mXGk($#VsO^6~h*1l~o>U zztN>|2VN3;Ot78H*jOUt$TKHJmD?=SII749a-vcj7t08y`5& zGW~x33m!_!V|A%^iw)tqpw{h+O^xyH6k-dO?q0wuRb}?wHho4eas@WZ(B!_9+=YC1 zt<9oC;M{ZhT98NvK3sq_(@BL7f`9f}ZgMl8!{dMP{P}9{?#Kb@m(bK=AC{jlGkm-9 zwJQccezvcocV~)=?r4n?M$^KGt!}X|Tv|7)cv+v9-MN!j{Gw_Y%BEZJ=_7frjBgH_ zc{CnMtA;QPDoG%}9gWoV1FX!~-lt9O8ip`c{MMiG# zQ+zeR6EyS_ct2leJRaIV_hLMVU*PS(4*2gJAqm{y(xSUSGSGGD%TuQ7v+JNTP~`u0 z-pS0A!Y{0Nl3mC`W^P|SswN2}v4jbFIO22p2bGaodN+iTe1lN5b&62HrD)6rhk}uc z{}dY$8bnq119V(fYIyy=01>cwIA}@LYgnv>1@anhwfV(y$%rPKgZl}C5>mI+To!fN zHwi{Llx~K(!#_{%Nc9u^vExuNPQy?fP=45rMX4xg}Qfd9tV(q$!yO$XoFufcA3b%7!Bh zH_VFhI__3GCMCrRkAh3{&CfpWk*gur{)nnuMZGij2U%~Sk4!qo$=YB9xLehAYAqzqoe;X%5md;tx zVCnLm9c0@N zh+z3>lTy~^hX5z`YMs4^YghaSi2K&(&P2i@u+>9==CAos~){M zwiY+LgOzrPw^Hk&q%S_ytlj7h#>?}o|L^9h1pr28Rn%md!fIqxlv&_?D*7xwe69{E z@C=Y%+)anGWpKdm7f`jg80ueuL&rL0B=kMzc3gH)s||2QvIuk++B z0GI0J0~_kfM|TQ}alUnJ+K~ZkC3WYxzR^fGzUhW?L5f{CH|)Dga0_(C=B6=gP-XhdT$eFdC zIG|bniCY<9bFv?DzXE5{e zKtC@qp!Pk1l@4JtkV?8KzYojKV01Ctw04OCvQh^o>6q?wfjuqB+n7tHc|DYyMIJe< zP>@w=F|Qk^@?k-Fw0=Wf5ps>){YB*N(fEz&xE1dl3q(3IFk3zOcNRZ>;VU$QnoftP zC@=5b2hK^QnN;G%kpmsIFk<#1rS=vSysO2zg{aaAFm{s2UxAiZ(9JrRPGwtlz6RBBeyy8@P)dT1Ex|VV_D_|ZFcS3yaO$0ZKQwTI zpeKVSDkw(A#)(5&Qd7x@bt86P^o*`iK5FSb69Gq`H8ik1N37mw~ zQ%gnUiG$>MRn@-goH#w1G8L^Se}jt5Bk$2J8y5x#^;uX7diz^b`rG@otUlLLIe%?j zvvqWA?Qf;cEiWG)4YBCa@t7}_EET}TBmU!_d~etWO~5GS64)-pya2Am9srTAY?@1QMZi>BBo+;_E5h+~da6#J5I zJ>)ggmJDE+GVfoUag+zd{@&TL5^lR#r zmM{Ad^YU>S5UM$YQQ-WYbV?NPR>7I-u`kr07-h!PRa7kCMT7kxc2NdRkeKq{AyZl>eZUzdTd~rlPltzd z)8=Atu%7T-PgB#7S5197{bRTS{W&aGy_{!jj0QEOV7$wxdQ1(hwaVs$J$olIG>r52 z`y%^lZD|>ud{UKDQqmMesbeEZp+5YSU>{zr9?<;PQBI~Vvkm)|eKtWsIJ zOy>@x-taBA+@jRT)Prj{jj@H3lLetz%6(S@r+|bPy*Y-OleOcM_mI0o2sLR;s;P7* z5-~nm*)*1M`+T5pm|&2TME;CYV!=!UUNTs6!kchX1nFVe023xN{~0e*hyZYgZq0zp zQws!kI?y4cY~_04wTAVU%;79*%lX@?qil^^byYnBsO-J4VRy-a_V^Fa(e55&poX1o zH#<_M+%Cj&-?HiBzPnmD`DWgoBbEa2BPr}(oPh# z*iZ;%I~MJ-K6>?wx1y||b(D{n4?eCy87wF% zvYye`+si#Qbt8|sor*rpWuQ@LfL*aiIYT$HehSYR8W}kw6sOSte9%U1Bw}V?EI5qb z2mh>3K9hA$aC&Q$lbO*HE;PEx(}Ex-AX4oy4NHD2GaWi-PtsfxSQpug{@^{84~Bk` zB(UWfBjch7NP#*iltN%uQC1GCxv8Gmoy2ENqr0<{n=TNa2i?K`bgqAj=zlJ*io7_R zwy1){-VEV2#0*Ha4GojAX8U^n5AI*ciU;dFy8~}8obf*#LtWMKP58wK0AMnTm2?Ej8cW^IXxA*gokaaFU5pbVr|YD;4}v0 zys(iB%)C(KtD^HYFs4#iUlA%d=KJ$K(tLM8i|+f%5ak0P(fk4yVrCW&8xlD8j75Qh zf=U+U#lQwa+}xwOsi}5~xSJB?&Y}LFQ0boovC#lb7+~+n0&rXSDO-3$okhKdrlzGo zLLvG@IHpn<`^Tww6##8x@lZ%_*j-RZzlJ@0SnILC^8CLmxc^S|UMfDsB6cP9m6fwpbjt!+~){)<&3^7y}3~!WMl>2HcC4s@4l;YLF+4bYm~>( z9)8UP+-zdz<#eM(Wbyyh%zr(h}zqArE)}Z`}D~5=48SjXpL|IRd7Ua3b=aaY=V{j9@d`# z@SjZ*sV(An(sFxqwT=7g^o8fSExWxTZUS|gjZHncZpWa5jut{cPo@rV-EiRNjQ 
z!@B1myUabUxhdH7ScoM2@$Y*H!p1;Dby)PpN^EU?byN+zc}^cRkRwL09&yEia%p^(L zmnJF!?gANsfq^wAbspd5`Fl7K<;sb+@uYMy+Qx1^a$^nMmc?6$KdR&B_3!az4H?RUFM91m*B+aNUqd?!i3ot zU4kcXY*JETglR$%&elq&K}UF4D3u^+S~1gM2C!Qx>6iL$Jy$mvP6-a(nW3=ne3B7N z#FVI_B*-&uow-X?i~tn3e#rAXRc<6F4c0t%&5=(a6j+T_nNHTr+%=epkBwcC_2@5n zTlfm_vEc)4e%GXgTpvWa9j!T?^E)lgpDjQZn4dniU3pB2yQK7$;|cw`_3`7?{>U2V zErugWe)nRwrt=RTUjeW=zFa<`1kPbpNF4pO94x3e|33`h-$&m|R=+QiM-=;Yz5VbW zaQcWxV-US2LIw+v+oLr6+|Omec}vIeoO@x=z5z8}ZVwYpKyQW~E&AhQ##CkbE|Xi= zw8VvDY3Xf2-KcoXV?B=g_<|U|CzF>_ZqnO2*xS!~-&{`Zt|+3Y`yqxp3?c*{9ox7Z z4@uQ?PWWD3ZkJ*)C@AlxxXf>E2{qi@A|1z65P*(48EiSp0c>=*RmJ>5PE$!8h1*BLccwl|K-@FfX_q%B5XfPiqFQ6E23#SXEECQ+uDke~q?Wg3-DR^#EFfTU9tc zyv)zt`#DQRUAIQ_?PgRuxCUO1TxJiaJEpF5m(>kFA*dEKYl}O6Q?kdU>#?+T3SU^z zWv>MuGNU7XLUkey_};*}@PEo1Q*{wc3UP#o_-*~O$vsTfzv8++`<)teeRk9-3S?N( zU%)XXOJ_=TrCkZjQ(C>73jB)5@K)igoeD9)HY{zYb5|9r1+~<8x5sGHGnX$-kK$6B=sFcX4rH3|L?}A!7qVVvuj##DF!`wE%#rIBES%f*J^AFoSxP>6#2nH(^ zFmtx`3LoquY;F!{_L|U7+TaNuzXqTA>tdaFU#!N5xdz={uM2|#8K4OfG~Y!Nuw}Y< zQp+Y8ah4gv_PO`#3p!5gS$-8QtwKssM#A8a*{QXs(-xEo(gN2va0pDnqa5A|7~W7e zs)2&Ujf}oUDUvmkw{Y|DY`RJ5dYz2Dqx%1D{k!x;pIoqfasr2xPs&rS0JUz@T$GCD z0{O{pFW11z)-CrpMGqI0jZO$ZaUMPUPS^WQ?8!6Vz1fzP$;_XIx4_k#( zc14TA=od%ll~kv3jrV5F<5q(6g40KPD1#1hG&O}TYMy|_mP1cZ&slR+I98)d^H@Dq z<(v(m$C~zTUCl|dEYXEJMcePy}^%(<7tS-a|ARCU%`rDs!NASh!>>?vykK z(?d>)?b4ES+XoCgd6A=XQ0?uF<=ZQ`lB7WovdycduCmr?dZ9gK9U<`sKY#}}%K1?z z+-jKDC>%q5&HlSpcgp?Ch7Q2l=$hdvtJPKxAP?)9%Y|eiQ#+KYqhRKz4gj=_w+B%l z#0SOwy;6hW07BnIQAU;pG=r;Xq&o8-*iBzE+Kfh}fn?qXg{uue`dA9Pb(c~hldeOR zLc_;d=AnsUnLc&;@Y6K}Bg(Qi$K|gAU?bTl)HFhx%9*bACQQCPk-LVKZc2<$ftN(v zR`lsuqt|{5mMHclIkqG@X|VOkH2sX)$il4aau~EbD?%vo(3`L-zk`)-s}E_$*Jv&$ z)gnh8zPCOs#~vol93hzS&fwRLG~xx(ag~?eBRSA_cHZU^P4}bMvlYKB!Njg7S(J_DMUAR5vV^U@X(i)yQ zl{@ZZxehCO<;KaAX|r^J6CMc6k@v@=4lj1P<=t77L?t0*&BPjV#&`K{mqz`5A7Omh zycQLjhcn!el;8zDd(`@H7*uxGaxKHoSa^SaMD{I})^8Vnp49z>I4 z=1eu`fX{OASt@{F=hDBD-h_?z`<;5rIf47y*i%4~U^zqarU}*K3&1l6R|g0-8HFWv zEW|nqD*^JnR&uLctMvJj1pfwrKnGjW=dRI+I+9vggoYY^~~(Nxh0io7L1{3K)}d z=b^dz6+{h!`Zk`6N%l&eesHMIm)Ep_dk=OwDuH(^V+Ztx*Ma$c>PVRg5_%A(OunI^ z--`H8kD~aSqztj3X_=tax=PYAcA`sA^thD64tYYhON13s@mB!uKaZ{#c~$0Q4dBE( zkn=X`N0lxU{?@rJvZh!VNNVt3mC`2tASbai(6#Kx;z=GZVwv4Aotr+8C*+J$v&FJ`*GcqEeZ(aO^G&N)j(WIHnA zjy>DMDJ@jUyOOc3phqBi01X)$F6n3G`%G&cD}oYKn$<>*z4O_IpAbKL_KX{89vBfJ zbotb=n>p7sSD`@RH9j((|5pv?s|$Epi$wR(i_(!_PY^#KyN;(Bq4NDkpQQ5fcP!{b z<9xFHN&%HFrh}|&f;xJx?+pG1t*(Vg>E%#32mx3jmNn~r*>SS7E9-k(>V%BSbVMW5 z$~RrmAx?mdYq+V~nXMfWg7;Ayw1-Y5_3c9E?KV=db*eTf@#-9p&$$~ToKcE&>c=rJ zZaS7Jb7x8Q;V9BoZ4&VQk#7JvyIGSKh=M4M=zS@4YM@ycDlN3sxaT++ z6;9Sm_R{q`Cf^%8ABOa@LO`749~T!#rW&&GF{*0|s?I3-*rPv@+P(E7wvN5$?rzLF zVdv3*ya4Pm9!OLB4tTC{J=sLLD~Py0QryuHJnH2-^|)RNVGqAvrbc{vu6VAC-WC=X z2K78#=`sdNNoL@i9rU%qIF3>M*jEjFkWP5>yrp&Z*uo2)hU?Q)1(KVLBNj))Av{7G za>%~U%zn7TXr8iUpGqydNaG0`ir0osvNOg{HhchnyaV}C!0}KUg@~9GkyUfekk=+9 z`LxUme|Xffm-DUfB(H14Kc(?s+grp2sCsDn<6?rTt@dFB1>Kkkr1}l-P7M;#)gfzb zB%YFH$`7pw;VmR1BePHQI-QCP)pc@I{Q%&u=g6jS%Is@@Ep=@>iTd4h@;Gji7Nq2v zcg}z9&A5Ie=41r_(x=FkSl;OIr;g(qCJdG;d+wdch)IFe-_>sBBvDWr71BL5;xtTI zXdVjYa~l1Isl7rGhB*Nkunq)BMo^_hj~%Y|_ygP6bOR{Qp>%@ZmfIAsT_@iM1#st9 zLK$isiKqZWOgDfE@X`_)z7@w4I@hlNG@iJNn2m6`8!8~ore=Xa>LOBrGzjR=z+xO{ z06ch=0ZbW`wk=3;PHXcm6n6)ujx97})sf!mY6&8s!sY-zVy~9ETS0 z5lfB207R!fyy95U8=snJ(dY1N!{y_zYwTa$2M|S9r%9`GU|z_4)S@0xkNyu0JuH*H z3-cV=>;<{80oPw>o6vR$%R)T@#>D0}P51R&3hC~WuFq9G^`_ zeF4=eOO7W2-%B-@Hq?(TV?$iGQ{Iyd0|qSY{GV|ijv+@4E$1z;lLdemuPe>y?g&g% z8%tH2Q<+c_6rwWpWjv$A3-6785M?jb5B~LHixMwdk=cG&hH3L-$Z0aqsvjo&IJv43 zaP;{~qU-v^aVtMJ5NSnS<dZLJQ)0v)M>``{@ zTEh@*uxtBz#fjYV`_w`&Gh6g`BjD+KerVYE?Dx|Z=@*k?lGVJ$@ 
zbVS22JnZ(tXh9>?#_!Ay>9?w$do)y!Y6$5Ed`iZ?S9|SLj1NhGX$&(Jq?lSpc^O0q zBWW7FI(1Nxo(~+^sGhsWk^M2B=AO1tyUf~2tpG5v+65npm7e53iqu!qU+s=Vl^J+2 zc)Ei3ITABa7jom|01#ysl*p||lkekb8pvH0B9E~HNU)~hG8c*ya8|mkGcg&yaFx82 zdmz$cw&~!(C<#Ucu59W>Iz|VPx1LLOV`DL^i={**uASYnz}PAvYRF-AF{}kZOEuB0 z;zo8YWo?B>c#AXH_6zwmA;Bf#N1HG4Ha&JR_Bt~u7q%LW`x4V3;7w`gV@00E@ZUqt zz}OHpnsZ*;ZR+7J@hP@Qa6ShLq?zMMnDZb>0w1Ppog~bGlqIeAOL7GpeVJQ499!!&tQd zKDL^yp7sP>(JHSl&pU*v)}@z9qrsRvyz$!h!9pE6^bD!RbSb+nH4onf|0hY%6AU60 zV?SICBCHkO#L@l*8tTM-z$hP;C2N98>$X^=R8}@<>98rgglBh!e{fEf4ryOTPn$$& zcb7#5y5F;vW%IVM9qivTnpiyMrI3wmg@GH~6Z4{}Dk=}1c!;QkW8CBAo{Sp0)EiZiDUZD_eewSVepAK$xRn<&@;Q-Jrw> z|7LAXXJq8Qg zhK-3HdL+1N3|fY;D1Zw5K8KPxKH_Y+l9x45*0VKM7?zl8?^#!jS>zbyP%&`r3%qc8 zqR8;JPncc|XD|iz948~4r&GLL-Qj|#lXnu;6t;mqDQq{r{cNoMJK*@MHwCz%%K5sgVD-{Rdi*|av0sqQ&xq$oZ59|)qvgmgj zh(&@Z>_35xdmpO>V{d?aDreAtwSvi(mX()S6(6Ul8IN;7vsy0 zos7`3B4s>+J9}5V3uZYJ1AgFl%jvbNgi6f*P2AGUi{0goDcIV={iEw!P^F^e(e!c* znCz6l36DqI2w;)N(DHLohGSxZc;z9cI}JZIWSZB#M>D5()Z?+RnfOKCf1NNn+?&a) zfTC88IgiEx=_ zyILTe=rsOk6TrsUltdxBLBGyI71vlrKUnoZ3p?xqcxkpcHAYiOay|j+?V{hto*BBa zpqar5w7AI%2X$lS8mucGta(KdT*=HF^~>xHmv@{3)-|kVUJ0KBclstW*Rv>b-KnC> zJDp`WKwh0c;O|R!T?gXzylab@T<%WZm@)Z;hE{J^8t4pI08DO4_Ji{6N`(~iU%<(C zvTiv2FcB;@nRF&v9p`3hnS@03d@f;D>TkLOjLovGMcNW{E_ z5eCv!IXs@Bi6EJc+_gfX(-rB1VEKeM$wS#oi+Engy}|Nv7T@?70eX^y;XaGpdnFk5 zJJ9IXHmZ0RPe&Bj)|YgHe0Ln_q2BnOfO{k^lrOmW|FHKKUQw^z`|yAuAOZpcDyfu6 z3DVu&2+|-S9nw7@t)z6PpwitTBGM@xqtcCZ&b;?{>N$G8|G>L`Yt3?vD4)1v-+N#C z+Sk6iaaeM<_#F6Hg51eTHHrU=^frDXlz@ z;Z?E;DRJ$v@+v3Xng=e@Q62DR6As9l_D6BA3$6h0I%n?e^}X(l+N+(k=;@a&V=(ls zzM(c~4ouSC@T`cOxj`7J55{*dus|W%cuY&!EWZu{LMRUE=%5)>VioDr(8lQUY@aQCM)&9FIeYz<> z837LJ5LQTafXJXKKY?@3Q+q>!zf&09fZ;BaFMj(E>_MOAs|!S?OY`{jFJde^>=aHE=1w=B-=E;z&UZ#(W7#WCd z9K?HPHXt%CXjTu%A$QV7eKta6?Y|TuZgD2%i{6o~{MK_={#hBr5A#=u>%bG4WxJ$Z zM6L~g1N!K(jmSD|JS`~!I@_%e7?jR52nW--1{XeFBMHab{OoF#{WJlxq!tBbj|eg4 z@ceB#=gZ|o#CW*5Cm2{*YGSwKGK{I$us2OXZoTz|#{Hk`;dHtY&V_2;kKBXy>C-Px^}J4`4O(IriO+YNP=#0v0G7n zhn+-l&tqnSgn$wfY!4&t2W!rU+LG`>g>F-I2!|0+&&dR~sz%8?xL&)qU+f^@ zr7_1+BZf4ysR~rdW#OoQ?4n-}cQAU3sJFXcF~qALV4H@$4P2aa(^bfaw~~P zQF~{#Vd0}lO5;Wc92V52dUov`{eoPCbi|#pe1Un=X7#MHvWEt$wMAYaV`JM$pXTtQ z`3hBhx^o9z{L$NhzdXf%Zu#@u0WDZOYE9FxD>{>To$^83VvgEbM|o3_BdlkzODRiH zShq%&`qDEUldN_jkkNtb*t>#^J9~+1Y@8e%lNu>2!tqN%BnA)p?m-Qi=Plrm`REUl z4z@!<+m)@tk0!#u9rAwy8q)0oUc-_k^{pqLSZ9$hU6RM@Xy5ch$_v&d-p1SFfUHrIfL?=O(aKV~RM&*f}1@3{{aAI^?%9ldq@OzswF z_69f~Y_rHX>geQv$G&lk%Wb~Nqs9glN|oeE^~4|6faV(6D>rUhGn@a)SIT;Ft&9BP z2-fVlyA4(V$!0wL2hl%lGywev1ojn3n|5q4{1z7m1y2`ax1x?fQe7`Oje7z*;8@^i zE3Asq#`c-aq!*d+IX7W-BV5b?h*^!GrFqoKM_m5COVHbF{23+u(xcQ&$zQL&kV?S& zWLI!{$n{W|qh&XU6|MQ~n*$Gz<6@VEert#;L?J_9iozVgR#oZ}wfjf-=iY;Y$AOpW zcRcmx#kX%bu-1KjQRcbo3VPPm8_)-iC-b>jwYyde{RUzSY6Fr_hPMk37qC=Rg6ODg z9y-nYMW!P&vq9J-d;}*o^bdi53@UEZLh}ZvsF##09_dwEX_Pq2O}f5(`7*rbk(Jb!4ER86zoZMBjcZ z4f^M*mQ(<=zoRFigd`IG_e=R}A^BMUZ&-dAp#L{4|C9XrXKeoeI4l*$PdZwl0NQ@R zs9982h;jXv5dfU`&<)ZfNV%7#?%{v-f-WbUOVKGLm8rmxTqPl7?#9QbnK?tq$I}!L zt7;UU2wWbFegGJ{t}7tbNeu`iE-a?ngRlcY53e$_01P}X&tcJ>*l zx}RETi;9YF1114wL4EyH0;Shxc?{?yIqeEME+F$L+DLAH0y%IiVgL*+*f2<9=*?E= zbMC6k1xWl(;Bb_j(XTL-bpX7W8YV`@UOksFjp2%7-P+|Akm8MGjuTb_Osg&+9i!K0A%dGs_dfIl$8~Gk?09N&(&81!QWAj&j+`?W7Z=|K6r+W>#Kf7306ZZc zM?i&>j-n^S#o)G`Hth|@qqJkGUwpkPl*npG;SID}ZIj{OVCF?1u;|lnBy-uO*w*Ic zwEN<|FD%@u`HR!}YI`4$E3m;Jk0wOi;q1|K_<%m_b0+LHH8r*TlCdN-;gjzX+2=5Y z6prqinV7+OQ4Dg^0YH(nXhiZ596B}s$4{Hsg;9R&Du z^Au`tEbIk`rGi&0&uoson5Q}4q&;sQcKXeIGRnl9bskkL+3XUcp~@W`UP1;mD)SP( zoNsrCtak5%nnE2{Kx&iht%^jm&NU;bvrz|5Z=z%`$nq7J=JtaAK9tO{@wPo2(W4Cm z~&crJ4dc=tnLM|df(OIGVO}yGD9ZA0@T=a zDCT4gEFk@cu7J|C;nfKIXLQ0qG 
z&H6N2gk&HWs?W+ei%bI}f8|4u#niAg=sTC&+s_RnV~(X#Nv-%y2^F|cl)H4t^at0M zfooV_C3-==Ji#J)JVxYkRS{sRrrzOgv6w=^Aj<;3h}4s}rPrZgjwS&^kL=m70e0v1 zoG8{q>DqH7lFvMPjX>dLf^3_dp>*Po_X__oyY^H*(^2Iy;C>0M+TKSgJO)OGlay zHXYXu`)r|YEjZV~vUkj*L&pdPVt9bigOC{tOd^<}z5V?e@1sR8S}@Ku4!7n?#&XTX zPxm(r5uQ~F^XnU{1m{!MKK8xUl{ugUe%KZ``khA3;vU}+o9O^rw4IK}Em?IX2)9>1 z8WP8|PD#DIp`ku=+>NyfegJ<)n@pkfuV`vB_!D>K~|+saSGYPA?tB?f;9JQk~?ij ziPhuP8Z-|n;v$Ox0-o7CZf)O7t5OOUJ4j1}+R-!4#94|oyQCNI1Gjh0t8sNF0#WG( zQgYJ0Rbm@EDRWAEIfuLr-k_Hla&__pnmCskp74c(%7j|X)wQ^lb(no^H!|T9e7?#zICp6{Vln9Ik0l13@iVuNn!_+sg#9)!An#mqUm=dA z_`<3pR=*?c$-zP>I&>3kP)6)Tz-uEBmGm%j+am_@t>LY_D|w2I$d-{gX2qJr3*XTH z^vLr{MK)S@wxAd%Cd6xAooPSus5UsUOcJV$bX$*w$&(jk!zOWUA_vtiL#mGOPY46) zt<#MS3nfGjwO+HNG}##m!V4-`OpSsI9pfr>$sAd`p+6DhBbW84L4Q-|a_b~Yk$X4O zY12!P0;&slttx5lYlSjzz*<{{ral=eKk3-6e52W$*`SR4SK}!9BfX0qljGZ7n{b~_ zVEeqlk9{mi;US%oq&47J#~m@B1`s(=UNe! zw{<6A2~f(0X2h+F!>T@5B!aA(Y3lr_q#7y=zqD@jb%Qp$pMSY@moI8;B`;pE??veJ zUeJ}B`93M$Fm%p@o9ue6dH#9h{i8o_ykV*BwLSM^O1?&!d7rWrCK5FPwIgTSt))g$ zvVG7%!=G_93)n2s;)wIXk2`P8wzftMVbINnVyJNW#HjuE(t?>!HMtv2l7p3vNDF%o zem66bq7pG{jB~LK_iWJjIzQN4Z`%L&5++@P`^8Ck+VeiG~m9f!ApQ=YsD2vk#bmIS}`u-Uy_$y&oMSML^ z8FuDjZ->>DfYkOUxVYXGs`}E_@YH-MJ{&1i1bTP?nG|BYzhMOFaf;MgEo0~s6G74ZKGvRXu2=|N9 zYP*VsADQx-uE<%8sv*@iobY%B%`87`u*xjI+Ax@(Pv?bMe!lsB;m1gm`^p+D3Q?VK ze!MnzoYB~mMSXrisSJL1Vb~L_INqT{U$;4ynilBHW3Vs;z$I&Tgf$T@O%sB)nflKK zJO|FtD@isey!tOz*7UJ=rVNQ2(_8h6NgCY`4E%|ZU1DF`%(ykHt1d!_Uyay4Gi5-2TME0ogx|IPOp_bop{PN&MBA?+!#saG;**`-OS^ge{=bo9%sjfU zb4$@WwpPC)fj11Po?gp38Pom;oEjnrWG+urkZicis;_EiXXmyV%=Jq`{w!Lg9oO%t zp9vtbNG0C_-dbd~cW>w@8}Dkt%Ri&pb4?>ahQc=6pjc3w>M2MVT>OtmG0BaJj*earN*x?Q`VM(+2CyFC2zvb+vKi6z zyAPc<&{4YnXC&hp+lKU1XTA6Pvqh+k&W?9VLzFN?>>K>Z82@#DkQM~b2~DBEngSK@wemry)Qs?!XaBx4pv2oFuMkektwf(ait!KNBEpT$ z!ObmwJTx?P!>1qn*YKQ=F}mzZRwYh_{ve@)^)<%`OIHP_X0o*K`!;qyx3DIlD!H>nDZS?=PG9KA>)W?w2dYlYE=7tsSB^P>2- z>vH9%-{AlIhW;%-?+1F2y8$Ub*L!?R$%0;7C@_Nq-sjde%Us-={QmhbqjjsNlky$I zE;n*V>h>NgauOU*NbAdo_*uTL+~-4KkHC^C6CPsyLa<9|gqqAta6)OS(E6d%6o<24 zvNTv>{Gqn6N(P2BDlZ}N(LD-WqMx(jPeD($Msipi2OFhuGvlH9*MnspU!&QCxNj8K zrXh=5T<%6a9%S%JAJX8_$^pYdN$qWY`lO?nkz+g3 zoMyFE9~C|m@s9PibU7Map7tv%gD)Uk7QXXo8os=9alAFud7yjcyk$(*(3AXX4f&}! zKN@Hj6umi!c7W?|L-;nP@k+%(iQl&owukLW@Z3CcBj>IIO6wGC#ND0Ga?&4Al#IF- zgJh2-|175TQTd8&p0NuEU)R>&nVXxZHZ?Wj!nKY?Ih1bygEb((QN2T2nRn_1Qb>0b z?7x)M)PAIBR347%qz|gy+;$eu?vvseZ@6{%J00uIN+4Fmd15bpDj#i_djXF*o8J&= zP4GY0e;#Tp2Ks-ct7y>^k4Iw6wI1+SF^%v>^LEvx5Whw65O(o)pj}nu-2O^UZ8!Bg zx;tS2C7nMDhRJtDfc+2J=vkGBrRBS+CXY(auD&=9@87`j%{R!0wH(d2G)nloX@I)Z zzuZ4Y+Me%Oxv+gvz&4f|uG=Unbt(Un66cItE(1z>0wzDdo^zK~)U6Rs=L&ppw)j;o z5PkDvqJ=kP2V-TO7U#=^)aFO-_2)x+o*MuZhNEwR(C$ZkCsQ6rL9K5xyfbMpO6q1$SJ8Z z;9gJtRmSA1qSLHC;(5Q{bM_}~_m{7QTKCBzpUS_53Z!2byFQT^l7SNGhoI~HGUz0} zie1pp`^!n^oC^@oxzL&6{J9>Sr?rmnOddSBRm-j{)n^}_g-{ARmbZhF%1jKy8>$Ry zEQWlb=m_0Nx*wU2defp$?XM3hU3<6I6WSR}P|qmBhwh9U;K)0Hf*~*6_C>|0OEtgV zTbssYweK`+CJ*foyK``p2jDm)J;`eUzj;0^_+TUN0{0%hv~jy3x;7DT7cEW_-n>HJ z3`HzEO4IG*)YD??I-WoArpaC6mKXAoP0jU(;-p}3kT`6}K>62OU*YA=q{K!=TrRp( zQZ_S+Y#UjW#75$*VZNPVAV)c6UU@)x2V>opr=Q=@`3mpCQlwwZEdG$~L{RWAC+(lW zE!7-y3Lk4~-oZL6I6vK)*mnZw!rU|tzIuz6+NHOJ!lye6@3{P~q11)-E-b#SF8{dP z-_b{@_Ew#n&>7RpdQV;mUv{~WY2=kB{AhO=6+}iTIqeaJ){eMQXjj7%!va1t1F>!K z4WyF;Be*|V%~)GsK5S`c!-YIq4JgiLwO|%0DXHoC%XIWqt)l)SQui4PX<2=07V{@A z4sM?biDy(+-dm80T0XDG7UE+CWu?2Ivc(L538M!a6N3UE4>OPVjHF75@E>&i=Qkf! 
z;8e7o{8;EmC(*mXc?w7sL_Ynm2Cyf;LdVuabTkM>|9scmMH#Fu$TQ0 zm4nNEpg7wXivO`c=QuHG7$e*fF#6lvoC%JPUXB;*nxE_xAI07OB@?n5iZn-{hLSlt z_~65cBx=9eD}A@)UgkC5oY6%;O53;Eo$M!a?q0!?Jb-6rm@8p zlB9O>Mw(Say9bPg3gXsnt=g!U{f;C$B7C`r&p1xOA-dN&R%+%`z_^8dDL#sdt{^4V zW-8l|vI(Dh_O*@dk>iQOBhSA3=Iti|Y2tL@+}3;JA^8eI-u`w2BQ0(B*JP3fJSw&T zkv{=ogw?J1NR^d;orW^^j1UZ&d1*P#Hys$c1LxkyN9yk7FPeqNb5A<5uq}>p%9uajW5QnLObe7Fd^F%+uTPY9)fg^ZL9d% zvV=1DUwXR!62~}D^}j0~Tt`GibhNnx$gD$KwXl|~C^d)apR;=|L-mauf~1`9einw8 z_n#Ib>OfWSXI*Nv^VO~qBW^o0gUMp8?{#l>?zkO`-Tu(ho%D=#H#1rYXP~K2NmR&@ zkN$;JRthKWkZbq#Ds_Vm(+@|&N%!o=&Qy$U156L$;C)QS!So^)Q4O~W*uwjgV^-d2?fmr) zf@p!bw+qn||N3K#N)8?S^oW!qM>CH6I_fljR`r{`fvuU(7G>iMO^|AZnc2gma z(rrcP?`owLk@n>hEumD4Xuip;a9?iGrP6G}wNhOQK_cIRSWme_dT}gvI*pP=etvXb z6@Qn?X~pll$X=#apSka>YPWv}1aws2^nebVZh zwQe7^c$&DQcA2@nGZT%%&wBT0YN7WOzv(nM~6&1O54Go{zThaKa z*y6CBABTX`>R80jH z^3zbibzwNSP4w&`)7G`RYHR9e$JWdr5HE??(bS~gTT4htJ*i>3&~4H0$Rp}t6&I?R z&dyo$>)BZ9e5odjQzgEvt#uPOJc(a;ZTTkBnX;sbPON2Xa$gGpC$brr?fkvm%~uK< z8r3-MUb#OG>1CwXH|#xxu?|)iuRlbJ)qvWjht$&7xj!x)4&4>+ZxiJm#`W{PtPq=; z(grmsh~yzAe_x-jRYY*GO1dk69ndw#IP>+SjuI2%XX?>k%F8Jxqn*1nyJD`|XU>Zn zMTxgGR6oiRQ^POTZk&%xh)Ygye3@pcPAAP}H@mKo(lHA;EB8k7ije%%Rr%ivUm7Hk zhq1PKdbP+=7GARh8dC)lMz>7PTz~pGi7i)JmcA{tEBYHyC#O?3ShbB^+@CEUy-7qg!;4{MWvyr;p1b94A1Sb9MmgdzfmG0f_vZs~(k2N1pslsoM+8$LPVx7U_N26RA7`trgE z`#QwwlYT7VU5j|FNah@{#Leg4V<+V4dXH0&~)3=ne;C@OR5X(cJz7tN1) zd(lb|^~&y~5jQg>Kc)hYCp>9vp70C1Nd`saHaJ(zLgE;@zE4*#KT)Qb^*%{CB;}Hw zPIn~bNT4Lgs_4z0!t#04UzROy^D;94_l`Mk`onpJ6z9;}&>6RdL)0|SHY%0Z!xRT&O_P-FV+9B-s~?&-9x zVIn2_r`LPZqwvQ}v3C>W5#-4%A8sXV6W>_y^A6YV=AKhwnJ@}^PRdAoD`Bh7tT({_ z>7$cb*JIi&AKcuLxpFKGX8B*~U!VUVV{o4Z~h zflI;9RaLsTshn&q{ew076w47ie-pFVAaw;abBQzgv)n(fXBURxCm zjia!NiI3NzHKy}GcrvLdn|b2|$OPY5ee)BAl^4=m^QIz%+a+-DRTuN_+h!JAaG4)( zm|w-AFpcE#&PjfLp!%#Qj`|)wnegYUb6z71lpY-t^yVUn=0#XDWl226G4SO5P9f;?@{!$aUC^+cCH(v8N~T-O*;r}tK}jT1x@l?zC3<-x zZ4@aF`H%C_po=_BBuVu15UY!n8EjA5l+JX2h<3$yKU4f9-C5jQ>og?i7WUg_JswR9X>cI7rXUPJQ0hsKLt(P?WJB^q0&#g^tWv3U~zWgU?Kj_pUwMjvtCuDB*k zj?1WbGb{xRZc?kEP6?Oyvpztv;#SoW<#Z2$pU?C4K&G=!{#QQU~#kffT!Z#)PQDh41 zT6lZEV*fy!|CPS?V;c&RZxrTP;+y#KUb&K_HXV}oi(*8$fD#j;3^o&;%oCwwnC%*L z>v)0lUpP824&?Yqok1PD)4ej?ql^Vjro^yk+_Ia7beAqyimxD+lFfuMWxl}Tm2Q!r z!l2=3h?P5!*e5t}GVrJIFg~(?MeEN(IN4B5bsX?Uo}B!&l^NO?9%*0Yl#am;1az-Y zT)&fKA`Ms0+9S1@43)} z_`?;h+FZrCvuYF1Awzg3@*oSO1s?5zBGeFP3c9W9r=SLCC+5empsGj#GIL=u?f-_r zeOGEB{dwnYY`*Fcj;XGEI0F{{_I4TD*oIk~N3|5gqUd_&rE3_i`Y41`KjH!8jOo~meO7T=GNnLPE{PDwf>ikyF%jNu(LUx`nQxRwDm`cE_E)#Eio?X^=o#o5|mT>C@ERD$4u}1YHa$s0g(!KX{neymY#Cnda(Lj zh`H*wc`-%?8!2AnUNrMfQE?nM184V+BXrCA>Ns^xA^D4`^g9UOh7&-G$7R3@GZpg6QOQpu(*+ABFglb4%?!T6SXSj9G>}03t(gL$sp6PnyPAyuAUxQ^SL8Pl3Dt+gpq0eQhq<=f|}AAOdIB>c&T1J z{~RIgwSqD??lFIf@f9LMK=Xk?Er&)2=z)Hmj$Ha6&tE=TH&7(}x!+=htADrKVw6rF zizW7`!!~8yQb~Cvw#G7bnWki`Lv{f_>V~s`neMCcJQ}$i8oL)%GMhgH2eA!q<)DY% z{D6L8cuqia+^2|8wrmqX z)4&C;U8JYAR=Hm5QA+jW+M-~B3|uTCKWF+=NP}|bQ)R!(FLR@#?9$>|H~WPyc0W#o zo)?E8=eW*2>k!a#|7wt^9|ZH450fRTr62@UVr-rbxV7YpLAtLa!3A2uVaD6u0-JW( zVd{K}WKTDsN*AP?E6=p<-z`U|E+_eEofZ&}Yb0a4mpfL#%p^Wp^G>nq!V~SBlF9jX z)Q>y(kT+t|Kn<{S>+NOT99MGK^eT1J3;GxnEu4dPgVpa^NI;2B^Xo(nPZJZ9&uZx_ zU!Ip5wwBl)Z#Uo3J-0UZ9B>-A(z+)W#~UJccYT90tI~XMR0S42@cG#mCv0UOe+8=I zzSxxe%ljbz+Gc~5=hK*WlgTu~;oD1*AgQX8$&Ds5s%tpqhaS;Eh~UwV$0&zHhkgO` zVt)0VQ`3gu&kL0au`hYQDEne1^UUHA=_}HmOWiy7eDbA6kG0IcXA8Efd-C)OxV9SI zLm{q`!?cYzPEBY5Psh3Gw|NQR1zA~snniDOty3pdB_)Hb83y-_O-c)O^Go&WXEmUX zx6@Y~M96UIZy$AizUJ?reV5NAUz&f~rM;d--U&tG-sTdQw%SC5?Fkyn4ahGa|6~H0 zLP(OO_*5NKz#-BEu|vJ*QoalOtbX%@^F6Fw0u}RxJQ5ovrqw6&M8<1_ufm`ybhwAH zpNSFZk+OvdI(8!N*mtJQc8)gIx@FlpCdfM^QKQ<``0Z?M75IO68W}>l+?%aOdaBli 
z<@f5#A0mOg#hL+ISo zy^?J6M78PULHf1Cn0lKv`ldc6{;I2}RME_64UNkDcoahU5p{Em*l1dVX}cV;yTYGn zC99fzZ+k($s5?0k!qO@#zkJc$*cdMvragUbpBXquc>Q_HWlHzTKI5R@w{Q508m8Ye zb?uuq25iSOU`zsLNd&*`9#7<}uq4VOd1EC7eOb(Ag6B_YzZGd^&fL-7Z4Xe07HPSXwfO-Z-gnQ(uD!p{`rNGBc~mL)ef|rp_l0kBD$=fE1oPZflL$h6S*B)c(E z?|yY{)XnhB4>;5Q%B;C#v&-^sh!?{zITo?(G-8OsY-q4zWz{pkD|~e_c&T^B_iWX~ zQ&EF9!lP;N7$Xi*r>>96l(*yfZSY@#UJjEPbgzYARP=* zgdd2BrnFF0l*Tls$l>%v-ezX#V`Jk(QE*j zHPkd=Xa*3d(+Co~ysDOMME>hYfw3#`6VX9$8%XEpuM^Z(`Fk*zquWkScsx8}Bx=ho z+OH@ZBjhRkDn`^C;}TWX)wkVqHR2K*d0_s}Dz)PeTm21^6vEFfEGz;$uLzku5)%Dw zPQZmnZU9RpQz^_9@Yg|Nn&kJHzeHK#N|s^LMi#+T$~*2iMFhy{l@b!~0SU@yJd z{`k1W)v+-R7Fjf#fmkvD0xW6bdzrAoyMD7Y^)D-mfo$g}C?`_}^)^zNSvINnmaibk z@L!*YgJ@ZhB=fJUxPK0S8rH$VL5*`dPqy(~h!}p!>s7?X69EVqULC7~@As6VGsNGN zXJ;>*k{I4qg~;FrTF^X@aAyO3rB6E07$IWX(u-FpPee$S<%s{wNOfVNa1}PXdwdHb zNkdlJT>zgi!a5}(T1wCRi4FQ+Cb~`xHAYJ!c1RdS2~HXz#WQ}HTB|3ErfbDY_o{!n zp?x`V^C_2ZjKO15TKT8hju0cCP_>s5_waG!Z)Q2%&tskHe){!a@A=1TKU78=@q}Ez zetihYFK{b=6yg5!KSigaNn|m9vuXcUM&OXDK!Dm+)D9u4K+USF3$zIeD$&a|sU}v# z_kx6?w(a2U9Z1ea<9#A+4AbjMMY)AAz;_@TMjk zf_5`Xl?Km`2Bk*3_;Humt{$UiM&+1QwwQeT6_I7ucVLg+3ewUhFs2o$A^Oqcy_y~3 zTsc)Va0x?P!srPoO=l$7D1SQx{5`0nHBnSlRYi4c?Pxu)i2OS?N?V2O1H`2Nde28_ z#!O$916#4aAyW4YR3S8IA3RJ4lu653t=X1HmMg#KieR-%?%JE(8$P0Cv_}3`d>TAC=#mO94|;TuwbHjflLe%}l0^4C?Ee}h(?F!f0?2+kQh(G1Um=w0 zJ1OiWn1Nc5r9#ScAl-?AfBKCe;(KqEJq)F$K_0WjEGZcBEC$D881J*G9A>2r7i0WQ z5_K~TTIRxclZW^+xdQ@B*m8g$<^;C()0EGd?_;*H7)Zb*8Gini>f$&u%(K_qf+nm+XojGceh zAd{5&kubN!Fhf}U-i7eez8bVmc^&MZ`ReX{r@4ZI}|o9^A?0o-HKKDUXT|gr(Zr7i9h;x}?}5%xe8b=AU_?gvP$7I&i>5sl4)GB4T9dbIaq~h zo&QWVIxr$Pmtybyf`f-chF&Qnk2$q;G!qJ^-2$qt7U@2v7AQy?lt6(}$qO^9)ah== zGNQdcmpU=XquUx{Y22>-K}4(fOJ74X#ftpL%QL5dl{Ko63*H4vdP9yJ9|hSrqKEj`h3Bcx(N2@piy)W|8Gcv>l(<29AAbT{Wnc=-By9FAR7iF-Q*2K9K{cX1XiLBbfkt=!DWkP>%WQrI!#C&#D zjSrkJ(O%ARsQy&g2*PWSk39|y{U+rR@kbqe$t03F23{t{#HgB0b-$)5 zZoaHP8XwoyiRCx>X?-07Ma0GRKlhe4Nol4^2B8uZLdEBqHw;*@SGAv~ggMUgkNdXq z=LiaaktP>!<{-HCUZVBh?BIe|*3EjyLeuT0XUn#7x7(m50{0`Y)PEPFGT+IF#kH@k!Pa+6w>iYQp}ZK(<*;RMoGDHs}~C|B=D;zL%-2l_?Sx3 zt#5AveV$BU-Pw1)e+$>~-g4r!{WQ9=-y0_>HZ^bo1^oLg4CA(%sRnKBwm%YHR%kTe zegkQ{ zqr_!}DauNo65xicw!yi1@&YtwF4wKM_}y$m=-Xy9uN?rlr7RRPu1ut{t}e8A`qMhU z0zPqFs&+dI1;oU};Lg6MIp;LXj>WmbVD!<*E~I&zBG2E*?*$t%+^MkBWy)$G*O9nn zV(O*En|>uuVp3@Bee^aS#(Ol|2oHnB(T@{bRE(&PpGx{gZFrdoO@FlQJ)Fxh%%U`- zZEIkBjOzaR@@%Lq)td}1qqWG;>%JZODo7jwO6w|6+upaBk7l|xZ|Ii*4eON#choM& zm2|gfqX!R3{=-9{0v-a@xZCT{0SD<;&^dU9*vQ*y+RWa*bRYC&T-p^zp*51h6eU0n z=H0p#!RW7UkP&9gYEDiaH#cQ@6BBWahyK6?5GlEFpP`~bP7B3o zVTgT1@?!i%q>y$V>0(U_9?#|$9E>DXWDpffy%6ob5{+3) zl@P7XR!#c|OSK_%*YbrEY8wB9dJ^_$?}szC!Ki~PPvs|$~{ zza@pA?xRtad1=$%mW9U-csPhUp>_SejzWD zp3@|tqhVSQu|(wTq(NTH>at4O`+#*?xxh=JmpirkdlPC-49faSnaFFj$=!RH&u6YQ z*&SN%)v70nArnhd9j+LAJ4S#)OhhCVGe39a=q< z67mIwvkxu*y+uH9dc7B6bG0oD%GnAlK)X(0e%lO=b<2Q6wZ`$#G4vHs{Z~iFZY%jR z;sQ7Kqd*2zRFBZeh?X8d(G@p7Wf8MaaARbB=fERH5tUerH!_^7Kd^SKh4*%;Q>(dz zSH&^Y7CCv&UE)6Jj=jto4R#l}dfWIp!TL)6doyQ~L-y&%dfA+M?Z(jD+??tV!D~Hx z*!l#h7Kl~M!;|Kob>S$Cz8;in-vQy0WvZU5P^R}`B<;?EC}$WU>B?np^If4KVjmuU zRJ{Lksa}F$A>sK+=JdLi;G-`jPZ$FN0+>P1F^%~|b{Ht>73(gqQ^J?}Eh3YT^b15Z zTf_`8O&MIsp|m}Zz9Jg$tQt5ROk`3s=)=~p9H`eW*vX61@qND|%#IeKt6 zO0eSid-Qt~##Hb_>SFGVBkrNGJa~|^SE2UaAx?2-{SpluVMum+DDyUtG)6PEP5e$S z75zS^7(7&^Ihq+!TAnz{JO)(MHHtT;dodAXStn8VF~<|*H|$IGMY$i35B1~_9Np^z zvR{GE{I!N}6lAOT;kpN}1!W6)KFu3E>|B*;9PWH0WL?NOL&mgNTJruY4}!S5aF%k< zz3C0D>k@6bnY`<@PD-Bx)ve`J`*Hh*HM-{NYK_*L^k0x$`j=0eGiKf)WU|GlaU(l~ zVnp(Uss}QF@-Y1<;foLfs!W0dO`p>uPba|$#7q>kYexZ1-R}5)x*#-z2;$@E=2lzC zhL7hV+8L;XTqZ_BSaI=_hYe=cEd2Cn1B2%Nd@!$KF2=EPHKVZ}qQ{0$N)^oofM2)h 
zvatmmf~9O<$B+P;Y!8~nC`U%4UW8c7^2;RzZsaLUr!sB6z9`#;&zAI@V~X53dHzUc zFsv?W|B6gk=A)LtG2!DJ0qhrKciu|dQpZ0`+B)ILeWQG=Rad=lufp-+}ti0JidHpf!~By3Dn&Cg78& zHx!Yz|26yLgC9lshVJE&oFtvJ6c9&>7ayo1A|h4o4TW)V>RmFPlPpa!N{*SCaT#A>vP|jXxMuIx2-Lzh;wf z@BJVe`9x5lXsIKQO%{0)CDF9CLv>o3B4=|c;?v+M)z%j0HT`FuhyAvR0W?@p*exe( zR@~!Z`>S4-9Ar5|rY{V{yR8-;@{I*)PU{)CNG2fEKbJ)cqIg~!km#N_qNepq3ZM*B z)QUb1B4sb1@K3%-Gg+~5e$ls$awHM@nwiybCu3K|u2of>HnE!vlvd~ro5Eui{)BC+LwR$oBG^mKZnC4p$ao#uD0CUz*z&1j~d!xt@&rgZ{4 zN+mHE?(OY)+&5wx(9YWS_OKA~N-=@AetTY6CahY1kys|I*Fj(?1}XmTe#!lq zMO_v;wI{5~E8phAqaaM5U?k*tkqZi%0 z6(x1RQkqI9j`fE8!%?xW4xV$%uwA7NCpJCAz-i2?xxRkw^~->&2xkA6`i-tGyARDo z|I057NBV{CptDjH>~an^rtaDOp2Y6s6{>cYqX$f4Jrd;&4rmfuw0Cbb*=$*Mh#J%l zXPN(;YbYwC=D0N{r^a+cKIdq|4#kg#8ACTbOV-GPQR{8dJJ+sG?im`x=?RJ|H`C>K zWbKQ`q;$#0oFCC&&Z(%qH*S_4%Wm9~Rc>#~A+(Lcd8uDmfpv zOf^&q20D|GsEP_>{p23{Xjh(8E;|cB@4DV@{yH$$F|N{&e>G)4En8C%m_xmA9$mTw^!?ENdMt-qRIQX)H!8n68(#y<{}Ig)PXK`S_7NJ# zSx3F|>dp>*#eoR?mf+>`Hhr`oIbXTTKl!8=huSTew%T0oOlxfqkxp7cgFiNGZoi(b zo!J;83ku?MD{Wq;i$~+>GSJ6T_LHb^Q@tK2WxTmXR4}X}v%))cN0UnLeSl5PS?iZ3 zRgC|%JXH1zB+T!^R#_T862Q9lVgi?1J7`tb@Doo0D@;$qc*gRYAjS`Zof8zUkN))S zuM_gHFWW&S3E;R#s=(XuycDXbi>mmVxnoUm7Lxe=0=1#jKx*R2mk}0XICo-S#4apJ zhnf*3MgGB|?)!~3+j!3ZkFB?kin8sZ#)kov?v!qj27_)yK^p1qZUjUaP>}9a8U&=f zyE~=3yOAE4-#tFh`@Y}$e&1UDGfNn*`?{|Q(?SYF|`s^PRYqc1O&&TJ!WgPeDtc6m5 zOF*hKlbUHc72}pJ#7-ZL@6jhbC&<2}K$RN?DYQ2`(9s!y5-EcnjSa(*VH~IkRe``xB}jHRnHtLnQY(u zzjnkNyjV3`Wfj}xZ*de_$G=j9$F}XFYZ})rDaKG>x)v2~@?pWYrkj^53<;k75~2FR z^McS$#Cf?+Z`Y*GM-0ayNcp`Y(gXKdUN9S*kiL$}8y)fwt|n7FGcRp#MbkSK>7RVG z@tZ|U43n0nVtSGmB&8+-A~lI41k(v!y`II-7)xP7L!Uvoj0_+HJ$MU)lRNvYD)2zASd-==e_!Ea}A~L;E8V~J){k@2WZ^*d6UNaQk%15Rh zL_F0@YEBeJP>}bCFOZldSVyJSN7#Yl571Hf-Jc3m=LsW}wA@*LnrU2?HaAyo8)brP zZdYt3hFbmqHz^x|U&;qi&{N+j>zpqV!SZZbC~@6lVg#UDTXyX==Dd)Q^KLHsu)pvE z5i&yXEevYddX=0hBCzYkx(10W{hr_dxhv-J$At5|vhuJI#99xX0jh{x(^93oF(z7) z1?{5i6>C%6h<4;bO26T#E6@z03+z;?($$`czF2SC+&kY8#X%;Iw8?!C!!&4s#dTk zTP6fZ41QH)Nf<~;cK)qn7>c9*L59xjk8(BlWkNndH&WgwN_GW-sTmAH78eqT3CDBK z(X5}dxUlc~G6@nZ`W8o6KRolQKYw3V7DL;!kx(9bC!F4U68UYf$U&Fk$+9rd+Z{0e zoiT}E&)-1h|GK#4QsKDd>?{d|;Kwj5N};!b*i`!a7oP`j0IjWsB6ak}Sz_7guG&eH zPADGEAcP-1peT-ooi(SS!K})p_m!sj$cdGh1Msb(abl4x3wUde{A-$VUz)nq#!_mr z)R7LdGjUQcZo4GKybdVQE6uo8f79zmN5&K3Vu4L#?(O%RHU1#PO2*Qk#;}q67oi5i z!vFp=yn>g;%vb)bJG5JuJCjhKfti)gK1!p6S8ySztxFQ!yxmPFWZlpIs8!s;MJHYg zf5fsIlozvy6gkKNCEv2}8s_!i5C*6#qzu+Nw%F$`*VEH0i83w3I|_I$(anc`Z9ZaZ zqTG=f-u>f0a|<BgtQXAi*w)YHlsyb7XcadzvdbED3>HQPY!zd^5vha+E!r}lAXi} z2BG0lY`{2spWOaUZ5dwM&b6u#Q6iE*>R?wW)t6!yDeCs6_k;V^d;2dn#(5qeGXL2a zP45gXq~#9=>Aa5sQ2J=^v9b!{n$*LW;1ef|c>dmeOPb+8I7Lmvc>ntCTMDoGnqlSQ zW&xgzqt%TSTf5_#tGcHfqgW|9-a;c^P3-qC@B8qv^3R@QgDAmIxw9+Ygj~Jw*JTfg@Oj>j z>=U0#*wD;{bfLEm1Y2CN5MYYmi1v!s8D?P-lY@@AxM=S?{En|zV(NaKevoSJ~nCk z^MtWVW+W|araWUw*>HRi{X);}@B>$>NM55NXCTWnbm^*SMmpoLvE$ zxDv_Fb)v><5fKslQN1scS0(`sr;@^`y8dD*@_b)PAg{FhohzT{{YsV}qwwFA9A!GUjN<_*XwCa)r}#(mVmDVOqP-H<5B!=N%@<*t^n?jyCfgsUqk z*f<9vn79p%j8)Gwf4lDf5LYZG1wAiGc#X}|lcR9%aNzY0(*oDqcSVmVtyL8at;dmX z_%a>*C2<1O>N4BK#I`7L0y=NokXY%#n_H(+-z>g_$;>tM&)!h#447zjpFO@#_%L=! 
zSY4Gll2{Z@PAD1HNBasGt}WSQpRC1GK9 z6FL)ZA0H6|vQXXda7>ECA1BpYg!?(CbB``(F;Izl6UwY?{nz0xH|Zo^h(T>jLqyl) zjxqrYXZ{lO0PBHM>jE>tH#{CBtu{ElO;|+E%vokx#5`sz^n`!9?o3VxuYw6Dx=cd+ z+1AHJC`;PY2XU%!q_5=G%tHfyDAFFHa|RLee@0nVJiI7HRcE#4${P9j(l{@)a6|}=?vl#!2fYIevuNE9m>HtYHy-4A!q*uSA z-v+IV0<$u$g~0V&vRw3E+(I`Fe#iC4GVQ@nYH93&+;C@?>IJzt!iI|2Fhx16XnB-v zJC1Ktt(uRaI6=s|RrW_y@XKw!#xB%m2GhYNwYEKj4xYtmN+Bs+|KYnn68z;(agzI) zXk^Ak;!jD5eKBCTyy4ubtD{{oH)QT@rd(bFnH-VzRTg;3oy&ImmE2vFrVnzlOk^tl znI12XIuHfVOx&PNk!kj>HAr$z>jKS#J9d+}PRwKUD^Q9~TDW)@fl%}Kn&=D(<&DX6 zr07Ev6qwm!*0-n=yvM*;V%6qq&F=e^wgecNEURu|F+;8_>#>Vl!Q2l{`H%Yc+!Enttp=De5D;(s+VtOjD~8 zRiZy7#?USO$^F{u3Q5zg-(@Q5?e@a_kz1{!CY1&Q`g}3lkmvzWfPjhA8s+ z!o7*TKrQEaMIz^a`p!0fOdLvOP!vDmTRN979amS8?vD_oc*`gPCg)f5mJQRy9z7KRR@`-~_)}~un)Fdc zLae}sJl(pT3E0QgHot_32rPMX+B9mosVk@FapGebH{!pLdY?X6V;!0LNl5zr;?5t~ zZ91%9pDT&IRq&H2p^@LJ42G+HzlAUM`HJ%i9%g(6>`!B;w!pUYN+oFd>k%sKXac>G+ z?^sg}-u@B2uN3{gau(sfy0QXJqd~p@?YSDok#NM+sQ7y}DKTJAD(~iQkEiu#c%AV4 zTBl4t0Kl7#s%9^Zk8fOBzYI{fGw}@Y@faFj22l#MFM|*fR!rYN`a9|qAbbmetgF@( zzG#Mi$N3Z5L0#QfxB0`K+(IoBPb|d4Jvd8z5&`)$C;J*7Z185ep`k90e($s)##tR4 zw>OAZ%rOTM*gB0ax$?djtWGrWOl?q1HCluC_C_i3^{v>l9vvP1&_vH2I;hwYHZ%BN zHI9u3%zFv;y@b&U`jKG4C(;xZAR>VjH0d}~wP5T0Az0p8cE$Rts?u08^|A`g>h3BS zYv1m^i2i1zrX@8a98gb3Mov+SF_8<+k``jOwkfo_-2dQu`J+%*!@#@Rr=Dcy_c4JW z5W~Mu_N}9-6yy;GEFuJY8K+tS4P7x@F1t_P7ov;rQ}tvNcAsLAXMGKfh?x;oaJHA~ z6FH>JXeEE(XiJ2w56g-Sag=mz@9ZzzyUWk?bNx*r*!~fb)!tw z_;7ShKd`vlHZ=oUf}F#|F68K*??rejtn}0<#?rWJO4e;4&lKX(R~~5%a$zK!omm> z_EPQY7$HjRKz+kZHzPm1Hq(MgT^pGu=5x&(hYkOjZUzQ5EiHwYQ&Z13u`8!*ZR<_y`f4y@^g9Ip4nB9}0!%RMl&_mOh zL7R&nOnm;MzXBJVT-{O~U%eb=v;8eU<;Hl0VmYU-UwrxW^So87#=$mH+foZR5T#mj z5o83TFdc_GD}c(Ei35C-W?a5QgPG4-GBF^kEYz2b1Bo`m=no8wQJVLHe1DWAmAQ-8 zky>Lm-OZGPLoe{^WQPMv-gS3{F<)O}3qUTzLPsvIqDQF=vA^;Kc5_?gZ9-zgQXpDk z_q$=fMt@+Yjr^J23dROJKQBqeJ&Hj6fD|7#0-)yfC)_OFu7|H=Q&j@(R_pZS`d;gXYKt-Kz9MR18RKc zWAJ679ROXS1j%TfQZYd22bK|(8BRGjxd|)U3{W4L*dOJzZVcKbAPNFSgBC8|={81D zV?^J0kc32srG-h)YU=5w3Ptw)58RKEM&j6kZx%Lzxum~z7d8p_Q+iz;j>8qy4_Hh_ zZbH3f7c~F5GLx9V`A$8Y(`E&0&4_4JoA-;f4Rtnsr zXHN;~J-XI>ssjAVJ`l)nt+;oK`B{@q$tgYCq0hF6iCeqqPLaaqz46U_zT!JOZ0GHb zs9RL{N13M%vC^_S>n2t(VaHXmLBK#LgZ)Tr=xBHA`YKrQGcD`L^~f5sa{3f_Z{ zHl4J^@*fC*S{Q!P^GoR-W7c{FcZUKcms52xCm_44=J37t*q@!c%pdjr2c$*!74MiA zBwsqM(o}IDPv&OwPXrx-irHwC#ZaN~yqv9fur<{V-+chN3}mW+aI&eyNG;0P$Qj$Vw7QJ}D4f=?5)p6#G!rwAiR;yP{*kv;%oHm+yhSteJ-=I-cAOTr`B zy^8xqpp1tiP*T!q`4&W~yV?S?@kdj)btYO3)WKd~POw&Qr{Okts0%u3Uy+zbQ8Jjg zO;=468t|okb<4er^1m;Zl#Xi(9W)Iil??s%PtM69PF)PnXlT$z62v*y1ydG?iJ=6= z6qS|XaETfA~2ZEaPBTvmGzoE!MVI`o7N z;C&Vt)~ss8IOT;*9%b8w0E%u?i?~T5W8YiX-?ji2H8b_A=DKT;&Q@ z(}A}k%$9d8xV*IFv)jjg)aEDYdOD{3j4D0Dkq`G`MF~VOR|J&JGq!C?a*K7Sp`)AF z+p&OHa!eBk>~5yjey$bL%7}(>X1waCL)u&mcdB*w*2Z5w|@egH_;|(wDQ?{JbQXxu{`1L=61rvDj$A`801hUa_G(a=tP9v=EVgXfHbJDYZ}w zqoxoYm!}*}LaUEyN_6%}%*vuI$S=vloY9v|E;U($aiGBRWQe_n6WZeaAAi`T^HIW+ z%n4kN^?FpN@OW4BO)Ueut~c0f)> zrW#nH-~7Yppw@?V?if(KJ`(bQ))la_v*%!Y?@IPJs6AOHy?VTEU3p2Go9N%JswkX8 zI&&$|u~nGOI^*x%;^tfiJJEEvmx1xe%n z{=UGbmxF`DAg9k+C_UmUO;@}5s&`ZG9v}}h5Polw^r8vCDr653fRL*hoW52VvC7#W z;K%+Ar2WZ78$ucbWQxzXYOmpw1wPJiZLS5Rg-ddCb4}CXj;N_UCx#c>!>)i^DE)#r zoW2hrE(N1^cb|+0AxlZ29w18LPinCP`HQ!&=Q>*DKQLXkJ}k5zSzB5vc@H?804^U^ zG3~(XJehBBvvC#z(3{BA?Cfl35}6DEgfA zwBG*bj!_LiPnUB_zdJbr_yW#Yn3(yJ8bzAofO&~V#i-oFl}i1fL!|Jp*hS9kh5I#Z z$S%M*)9{Ad)c=WaZ=##C*mrZ-Z@v8vy~=e~Y(73~{fds^!RfXn_i)tTDq251e{_XK z3e73;Qh}Ai?n)Ilp1p|o0)9bG@A_wIKJ23z8E}KQ3E=^Ul6zhUfuhfd$;mF-Uy#|b z1m*Or-hQ=k{RsBn8C?9>`ADW73{E+%-AYsx!iQD1-e^M+si0qP>`s~lOPV(x&ZZYu zsAd=wbyjW#&mtcmEjD21OH^Hui}m9x)?G#mti7Wn_s0bYR1u1w{up}$d}zIqFLvi1 
zy=Ezaow$qQ!bV|_!O)M&=3=V`k6Q+tE47ZHE_jBg$q0gnXh6xu6U^47IzwmA-(o#g zfL`(rzIcw}j(R#ItZqvNCa=|S8=kqH_sd>yIzes%As*sO097%u$V5E9h-olS zPhWo-*)7(a{y;j)ioDMKL|;pB`T{rFpouYJ`cyYc0iu$sH9UF@2@$aEH$uUp@*72y8Y! zk3Fbp)qdPde`L{O5(IRz84g5l+OY>dRaDi~Fdk^qIEgP2x11s~og@{V93IA<6a;a7 z*NAbW-Aygx5s7hwXW6E}@R^f~7FxXTk=>{sPXkk%vI{KyfDifC_H#0E#BZ`yAzpa< zqrYMm)nEL2<@u6>MojfU6Ezl?DB!0I9`-d-ox{qT{!GZ7>k%rGpUPGoH?BO`S15c; z5(+GMG@-gr`pk&VDizZUG!-6y5qP}b*`)X&jJ~+R_9yYA;JvfDTN4dO$5#LI4LYrB zz`1wtOGCepnlNj}Ca;#}UXl>75l(bg?h>kEi-^tqoL1>0O_w%X-n=Y#rqk+0dj+4H zgXPU?8m-s-SDWRrI~r4p`YthUS(fi8`&jd5n&C3w#DD4m!KY29*qOG?%+C=&DL#x2 zCP%x^H}Ukck-dB7!TPR{8jJCuGgCYdwn!NCvcxOF7XOCDv_Uv zoaIEW9AtVRb$j-gRwt&8hhw9f9ptWSZNy-9UUZv) zGJCzveuM4%C5?HmqN}{=U=4$P5>s2AWkarC^9pt{uYQiOyN#$M3XD!ZL*>$aj6p?a`+ul8gjVdW&T0DYTbMfJX(0CDek8D-*zN-y1m zyczWZM(F+ISzmd_?~Sv*eVv|Ifp7VXs`qhDibtw1`ED(&(|I0)gHb;u&qvt4HH&KC zaLeC_eih~^Whqrr6`HPM>_t;c)w$zmd=|=~*su^yRE?_UEHY ztFV=%tTolSup37H4)CkJ9B#Zu)ElE+Zk*Mlz<_U5zV}^!`2rqsB1)J-@8MWc-ANFz z$WI=Ss)1LAhJ^jl)6-P$Ol&~JHj8ezG91`MaL+GQ>qC4n<}* z-dA1pm2cbu^ks$K28FRC_Z0f=*0~{W7}Js7CPIC|gn066yNrWZ4!)9x#TY0$;W%gn-&i8;TOn$!N(_BTq;a>^txaKPVr}ddqQ71|H9!QQ;rq zo4$!N`PtVzKl?FXB&Rj>(#l_xz{tq@-I40uTlReKXWnF&JReOKx_jsrHa#T=x_eB> z1>sXfa{u$-HxBbN@1Mu%^La%E)1AKxnePic?h>t9j%@u-Dp-z z?-AJ}AvrRL@Z+YGq!w@!%E#cC$>UDQ_a4y9jbMtO%eiYJ_&WLO=cwAT>cLOQT*BUp z{`2(E%kKkh@qS?%SFhuYRCm75+gp=4rySbYCeK?uN7Pb0`fX@vn>?U5&oiGYkTxk2 z?rqAZ@>AF(xyq=@VY2_#hCr+~)1``q?se`Ji`}!MXX(yiagzd8bk#p2Wa1Qw4^(U9 zzH2AW9|I4B!58+!pXv7;YkSVKcA&AykDmeL^zm7fOw;jI^Rdyl?R2nQCd;EU&hHv!F=&S{+{Ppk_x1=H%cW=J%<` zn_q&!9y}=D$)OYeH&~|T0@}BP;T7Qtd%vSse+=2S1&%HzxJv_}<3V^kNtp;qvG}aI z7oIG}?GdT*pe>R8&vco^ylN_&J`$12JCyhh78p&sK)tMCwu`yzqPB#eKn625K5 zbRcB0AG{N$?yf=r;eYtb`TU!ttMeC0fi5h%jokle0q_&x5P+Tl0t6WNFcd+@^aGB_B)u$=6`MS*SuUuxSP+%4aLNZuen^6=(AO7j8q_#o* z`7PrurcQJuRz0FpWMxToCl(8D!#cAf6RBWXbQat+FJdfWj zeAY=>>(bI2^a%e`8I6sNTeIW%cYY(1e|j>rU%YrRk!R%R%csZH1dg zyTLaz@~^26v3>aeT_My+^y*bz{ltoS`Q?@3D1W0o>U&fmu;_6=nkQVTcd>v1VdxiK zrh7$x^Ewnbmo-Yt6-WvUkR)~MXo|`g7Ov_8tJ)341P6?KJ8YL&I;@PPeKkF^OcQ+D z)fkeve{6?4<+AG|^a71UMFs?5%6S7GiP!4W`;9VBs#6J1pKv*@;REw@j7l!**>ee% zD;+z-U)oGgGCV1}K@KjJDpNM74U>Gd%{q9cKbQa$)ZXz?+x^R40UkZ!2?CbecEjbg zjvHw)Gl&mibAp8L4eH<0R}?Oof`X#IYq*}neF#PA1sMDr&fqsRRZf3Hrqw)XF7x3a zdq=T%#*1P=pc&}VUKb=$Pty{SYa2^SJ#9S42A#e3cF%`=zBVC+rw}(qhP=ASKOt`(hxGRUNHz%s|%333p2E z!^9H+x?9SmNr`_$7flgqD!xd(8ig3K$sm z4CggFFM$)A4sQBbsPdc1n(1>54aEqr)&=v5Z8Fkp%#YA$37eU1$&g*uo z3hipJYFA3rSqmLgjr?3@@v~7Ony3&QwB4+}ZP#qB(GALVHt*O)(9_<<=T>E#ma8zm zW}i}Z3rW5VeqmHgawO*@UsJz#%~zpO)22MCT2}e8&_?4aG~vKA!~IHXB~{gIW1~fx+zxXd$u>*HnV0qt1S)2d!dudr^idc9W(8da2zkyVw% zzOM){qC5~Cp||M%V)gQtJ{IXW;*3oQUHFDV>xAD^1gHYcD+>dK5uUB0a? 
z9*f@MR9;{VC6VNfHychG#9m-(rZyc%xW>o-LGNcn@dGRXIzOnX{gi`3P~G8ht+9BI z-(RXeKwkXT2Iu`U2C#=xPm6^@5bMC6EFdzNC{OKGb#;yj$T|fp-Fz-i-egU#tfti}~)1v>rQd4$6kFles-OZs*9zwoT zTTf-a**XGL3`|Uh0CTwKT;e(bmpP86GklZ@i|>gH+jPjMil4p-oae_aV_Z792rKLM z^x&au17k1;HWF|WDud%T3CH#UR$XSnH~V7kTsvpAb#azJ2yv1C$`COE19T_K(deTY zCer6;*P%9}Y->+HBWyh*H!E=t9}S>I1BG^H<)R^railm6;UYQ5wz;=lQ0&0X%Js}0 zY8Uppg7M#pE7{|<7rFR}&93}5j<|WDI$5e5mQI*0Qe$|5FYOuZ#7v;S-V7UL&T1kV z6+Tz|+Ds$wNg#NENb57hfUhH$JlDsrm_Vx^!eY-cF?=h{5nGHfnqGfu(m@X2TdQa& zVhE1NV=CDBl>`T`KTPg>GFCE9!}K;xUXY+U${My1cj8kMPNftLW=F5NDgQ`RJeb>u z^$eI*WjD=N_bZ1#&EU_BcmsTw3To1gHdUw-7G8Sfr;)3P!qSu9R9@>=dNrB4qzFb0 z_m|#77eeyyZ`5MkQQsvo&ke@2XUuyfJ(i51*@SJ&O>Z3AV5_-RB{)bUQKF)vEY`r|N zbyoq=Kz6`8s?2=#c0i-$3t1on-Ii($_+n;y>DbSM`k8XW9jc-x12PeAz+#2+c7#~R zaXLGU{}7uAnv+U6_2+8-lXr6Vv(}mV&WHryd6Il-zP(Dmhzqy_hzAi=C%NtIG|!n0 z(b-L&j{V?AXz~0umu>GlSxCTo=J9@u{)eX|-#>~dhUK_XNuzK4Dt=S@joMPp4G+5nCtZ1FAVH)b>wt z-Xe!y2GF0be#I8p(x)B-q($^nAXO$by1JXu$0zKLU@cgm{ue7yffNEE3Q04o(-l`uy5IzJ&(Kwc_N<8 z0$lflc&md^DjB0wAX*l3>Tq|563doPA035JuMkw5ebar-8 z{*5iHX&cDRc(bi$<_?|Pt-Y=4L^-}#;oiy^D?hU+^$pUU%|J@R9hEz>721E>VIbod zz(>QYck(MYVL-rJhJg*n*^T7}EK5>)L)|dO z8XALX!d@>tpTtJVoB)bT^F-G{+yd~6BZ=S7z4dP4TbZC4nNLq^e*r!@pT&>cnkT6tGdP6BId!#zW)-ii%}x0%k&w*A#F}z2vCCd}ejdp{ zfJ3)jCh#Khd888F7^yVUGv-7lM>pks^TPh$a%gBT1Y6svNXRmmebbF?TbLy<=)Rn7 z>2y#M(@{tAXDP94u2_V348W%Mxi-j&HRp$ZY>Ab-vMojMc3cH9T?0@fg zHfgZ!(F$H@n04-|FGt7Y%~&kKwOi#X41=?o?F3M2qV**f|Ex00=>Xdo=RV7y=6!iT zX))YyK}#sohr&X?fVZ%bB9$cZPz2--0R}x@A@A{v>dJ~IjhiF8iv$C7MQJ}OO4Rjhwl*MK-{s)8{T5!2Ya_(LLH5+wPty(y)Y!N{ z`_W1Bo1Qwv*JClxH2r8=Vos#hD(&OplYhyA=nB*V<6B{oR1;QE%#cVb2D~-TD10qB z6$8R)8UIH%TViyG*_NnVpUcCQ=v>QPlqgRGi%C$qjD%$pA>kp#s;Kl?(~Q=xK|dwJ zrBwHfqe`>6@bPkSbTw8zI5(qnW(XfHrR3V_^$B2HmW=vbRenD~Vh4hBT`~9p`=$BR ztOrSxYiVA1&1sro{*QnSt{cYXL_TV%EEe0B!_SsNdBbXR9A&Fa1K1z%t}M)yx$W3X zvvdj*>tD?eUcPC}Hkl7M`dge%LPo$HAR^d|9X8KdHzwv6BV}K{=B>FB%`e*VBxAawNUOTC$ZU8{>zGc^#JNi_@u0f?+IowE>y~t97fX3W z4(E(Ph**NWAt)aF!bdGTt5CPRJx`0f>`N!&%UX_I*D=p&JUBsfq2i-+5(x^M3Rnp= zv=(f|#$j`C^-1p#d6lOUt);@K?Z744Ic;eOY!2 z5k!Y|pF^^n0bQ2o*AjPcl>WBolG2aW6=WXGlTOz4%vj+-{d_5_TjshZ){N8(iCU{nB2W~5^#be#;7a*( zbyah?+^oZ^JH{jlW?#$a7#fonTROP5o$C&IY5dAsQjw{a|TMEES0zCQ^XP&%JQaq?3}Gixq5jVu)JUqg5ULH4g{i|69k) zk78bRv$CsyS`vO6*qpt&7A59Gh!;eNkma4|h^HDcoLcHMV8Rfj`Jlm@ox9F6udgis zT79z4zNADI>Kuq{4qQXP8Uy-E7XdBGwl=Ye`xs&HytCC~5-R{p+T+|buiOjQ*R0(o zkmY%ekMg`amk@e&$`|c_AI-%l9&tuRDyIo8taT>(zUd#G_O1)32nokPP9Ikh)Q`D3 z*E!(pa)|2`CW&Y_VRB6DTDkFN4j&n*BnjR|6rhR4*nQKq-s4CO9Me_s!0G*g<)N!I z(hu2?>KLeiH(?->fWUDkT@ex=_i+y3=g~uFl!F{^5hms1d#L_|ITVs>$(#~$evu#3 z`A}q32(yu+s0vPe$&gca*P3PU3=2-c1DM#dNR&P|Mn{UUKZ z$9m|&()s68I{K*h*Hh7cR{}QC>iwu+q)EpPuA>vAj^U+)VaHzp&Q~k}KPr(XGH?UKCHkKB zZxRg8?8LTFyP8su%8iHv@7HFpHUjzDPqc_W-q+J%U3VH;;yWzQ{kigH!0>#OC5=-0 z(#4Mvg%(WDkJ}T8!*4&iajfw&)NrwEq|R zK>4XHmQymCfeMeIfM&8ZF&UY_i(sa-F?p!INws$Wo-4=YOtSPSIpTX9AllVgZ{g7R z{3}&*;gIo}U`EkAe)*%;7`r+w&PE-!=R=VXW1IV|C`4y0zJ+TTa>K9Xs3gH)br!u+ z6>C?$`DFL)3|`IGHZ`!p{rpT!MH;+_o}?Xq0{m$g_?2ylbte&Pg;G?W*xy}R46yO? zYl-S->3kzRg(rt)eG$~bVc))PT15If3cXQ|r+z&iwHRZjbtql*fKLQV*-esRJ9{X; z^|U=R5c?3IZfqoq4jbg#R3t5Ct+7ca?*p~YjMb3q~Wpt%{YVIO~W++8tqY8bO+SoXrI4V1N72ZHFsM}ey0488{GR5 z!;?x#HHi@ln}dFKWcwS8w&5{34zkzwx%mFu4quI=67cvcIsB%X*JdXqm>mnueL{BR zbR7x=?}RmXE2I5Bjr=LYe1nhtBf>?Gd7d$vfbImIXK7@hr;!6_+cHFkgborJQNM1I z%AvZ~Gi~)MXbE0O05)wjuxUX$=9X}lK)lWNrpNrYlP>?#l(SB`@zT*{LqV~+24Y|@ zF0xo8L}Ckt1P$3T>;mZ~3m2_|M+|k&B!xxnxIYU^Dc#y`L>TGjO4MqJa}<1jG)y~y z${Ki6zExfg@j>99%_<)-&i)i}(OU7#zvOwvZvZ9n4h2%<;@9Oy|0l2vLaIpc_27TE zOT}`IiYzvrwlB?He$%CFVSDrXS-k;$4k$|Vid#Y8`N_~gf%o~Ar*hcGa_lc!EJlc} z*P-k5`&*m+DcICQ=_z7! 
z?9g9aV4OaCuodFsJ07gs1$jC;I_rLWj8K=4;UB$ZNjGSFZE%JXW!kP1N1g@?67wlg5{c!NM}y`4p)-l@YGFueEBi*Z zpUl@ZIui1%!DUYNZi&!lH=8y2&o!sb~~1-xZZ_o;o{=bR8peA zUc2;hAz;3THR+SHzY~kC@pVuR-mgeK+&NF)MEUzHvOb{NNn&KA-a@3Wcao zibA>wrl&~>jD!mYs5k=r2_h4BzBp+~!&E(nF1rJuTaPUC=q|ScZ6EXfm?swc3j&NW zE-z${4w~}{3o+kjFgS+Scho3J7?HN8zG>(pT2*Wdj8z`O4mBJS$o97 z0fv+QXLmx8;KD4Qk$lGpd?XjmoG}!1G+T0eD@8%AXZ-wSXfB`6Hp*XGp!VHUzV1)3C48uHx~@>d-m&5%{&TQ{h(E7|h3%>@f#fu? z8m$y16dg4{%+*djP0fnPr-TFR1^bjwCo0}1--woTU@EY@u}AQH!!zA&CN<5_q)3Af zW2lNuO;pi;&Zf?=yMyzlDkf1NG+dHI>rbwf{a2Ra4Ilsl+C9?6C9a}&K^s+t??#yTi{S-LHqaZC>S(Ydzj3keeilp1Mw;eZ| z7sE}ym6$q1w@pd>X%G>@ckdyOZ2g?{f;ub-<)^JhZ4x<6BMX-*%OmJ)55#*XPddSV zId8=%u^5_?a;+xE$Hm#+&Xl-PMhcw=ahx#%)mCY8{h+)1IXlm^5Y1gGpvN4Lli{7; zsBO7q*YocUFKU9-LhftBYQlRs=&8YK#-`(hv$s+fIlj4Z2zazW0FGaTb9#}NPKbcV@wNl0y9?X*WX_q8Xianw zn(r$rz3X6P)Qa+mF9G|Edt6y&;19hO=)HAs@1Sax006vOYM_@l0(P?QC^nxd8MqrL z3e9xMMa|}$b{qsqu5mnT0KDery+O7zwrz6whU(jgS`;B3E&IB)Z=2Pku}4|OwRl`3 zsZJa#4ZZOWTJ_Eh)sK*?5inN-&(X%Hd8Jc+t<^kZ=J6Xj^nlb=pE~kmd-q)$L2;3k zPQ8K5-Zf{w?*yz$A%yHo896y(OAK=-Fi7m&0=;1Wn`HSk;#@(&i+u;i#>z+{VzL9s zH_(@?!n0H>Ife|RI59GY)5FS9ZDL(Vzu`SIQ`1xF*3P%$aEaq6+N%YZ(I#~@1tgr6d5UZ(b)EE)uNwHeTIpE^DWI`H4jSK=vw zWi#}_!>oS4-1&;fejpVfU34EOiI!^P@({r=p89);kqz6JS!V2pVV6a61(d#nVhi@) zFp=JWmc{}W<0Z~nv&KpF-}Xdv{n3HGq%j8VE?ry>KNTj@7pBs8kI z@10gr+eE6E{a%TKu3w$(JX!i}XS^95P?C?S4O`qqFdk<)LJ$J7Vh9OfO0tB+Qb4NEqAC0Yi+*#3PKc}eP~{Xiv;z57=L zptm>oxc;Mq*+{S!dER_1!RH>o@qO2jU$GHt-R7vRHjk!TyJ^xloN6OQh}`XRxqwb< zNv@74KF($;k8V^8Od|rbEwIsI4O~x=^yh%@3wA;b-J8SFb z8^;T9?{b$R?%r9Ti71Re5uk?)H`&hd)ZcC=4|@y=`{GC$Y4rO{yknZvv8(>D*?v95 zJBq=nUG?_=qw1~0s_dTc;lmN6yBnk%K?!M)PU%hwX+eP|{JyV$d!6g%zW1J)wPwv4u=|M2=6CK3^9*1Ox&bI6UNf`ZBVX9LFVC}xC>Kzs z20a#;1WKgcD^n&3f2%cs^d)9lr)mjdRJT~4>($$?2LSdCy}RW zun^;HFr^QI?T2x$i3AT+h>j=0q9v!$b}wY$5g<-b^|eD&B6{8dQSLV-2DF0y*Za*P z-yh!iMC@$81Ha5B-J%9J?J3c4|(9X8A-` zY@{i2`npi8xd=-FtXM=a%Hu>@Hd$0@GfrZHP`LUHxC4uuUjj3F4j>BucnVhRaOe01 z)^v6}y$)XYZ&?y_N@~3Yy?53l3Qdmp-g|;aKWU&WF8pAmV{_AZzhXp*;KNN_u=x6h zc-A%W5NE)9Ij#XF2!>UQ&E^{1vq`o`a08I-h)Ke4jmxViOi@YY5PIda7T<{>JS^JG zDs==ugy~FV@<}0#Zs5K5qhT6e)E->K9$eJcYp&|X@okop236~&*Cih$Rx{XK7+N{jjxsa`qoES7B&KAbkv1;PE3QMqpbmtjUb7I;JOoMr&GZmi-)?1r+ZS&|?`B{D4Jg#=5IEz_X-!1n^D_t_)vVQg>@ z|J?g_S!19Zc}wK84)S9*77vkfd44KfCrR3=fw{Q1-JsC9{(1yUF=wlq7}G*Qh{3)ak$_7_fGSB%+<~9`BwW&X zrrX`$pYYMpLl%Q?0m3jgGh@cn0|qA?hCSiaiFn2=#KITMtR#@mypjA@0=YV~@kQ2= z<|?n2pDwtx-deORx4ntxRL4k4-bRc;=_RvW5=n{?&7uaBGjA|VaYr$QuXcX2_T!JU4c2Q`Ki!JK8$A}yLM#tX7385t&-2{! zN1L0QC#Ib1d9+!r@Mz@fM;Zm%rw;?%6}YV)>*Gj>8>frM1Z=blgA-zF<`RP?Dm4k| zklY;AGBU79C*lZNC&sM{wGnH?;YCw>lcAyzPqmX(4iF|kIF8Zx9BH-d@v}dvyh_wB-AtuPHx;#cdtq_3Fyot@ zN%XVOL{H|_up_D zD+-VCY=ep!QloRKcnx;iRTFLd04HBps)gc`I(}uijv!=Qg?Skz4fzYoS!4vF(z}mf zEC99Bw9uDy-R5@Ubt485W@S_BSM7?g9864QOgAMZJ_hTPh;28VzK~#;%cwbqzdT3d z@4A&Z`NDiM)XMyN*(j!){mJs2p`PaK1ChEHVqlqivZ)}-EAyDkAQfw&%$ws3*c&1q zq_V8-U0iDK+)OM+76l}FW=!KWqiEdq!BRy%``T|R7nDLdpRUnRfyp&KabM1Y#*EE4 zPOCd}NbJgln-gaeMHO6Eas&=lNZ{O3` zfLn+g()wGD$S^|D!bj@gI*1XGdcXx?@z7^O;Ka8UzRG@JWq-G5x}*USb+avZ=#0#9 z6iE+o6bl%7b_8&@-r?_(5)6dJGWgZg%1B}5ZC_rg+t*#>mT?Ty88Ma0n2^pkqZ&t( zG}}pn#3OMyU+>^WFKxms=M^w9U-6=O>NKEKAPjymI|Yo)mMR4Y2oS`f_7+voS>th( z;fS|I{`l;OOdVqzLYrU0iH3z5Psd}J-yhSZSJyHfWL9-m!`8^YYjmHBu@wCxbeLmx z^gG;6+-J3*r#RXI;}{$;yhKNQ|#~Soxp=9N<=7|RBU(? zv{1$wniQa>y{d<{-TM4H+9}SyzQJGs`P-RcM!2O{h%snR-WYX1QaF z(j>_CY&8A|e6=~ah3R}i!X=bC>EV<>G>(1Sj8?s6+T8dBx%kBOywAZ=wR;#`cybJ|U;}1p}D6oEl#Fe{Qx;(97+SOIL%%6IjTbGLAL4H7tT5Wsubo0Js{*$WR zuqQxF+XaQ$e0J<~!2lR|G*`>4?`5x_P$(u**7^QY*0C??aW-?%!Qqp~E=jn4n%8(! 
z+^_A_P@Tx1RHBN-8AYZ=kv#1YOG>S|4&?gsiBm{ZVofB|lx{8{>9X@XY~08fpLVoz zsJPEgz^oH3<@yA=F;!XMSu>}#!p`Z^1iSMrnCqdy-=KS;scAv&8z_w~RQUJ#`X&8Z zDd^2Di*ZAQ>u-*FQwnT$a~@zJm0S(PnNx}(0Y%KE98x;dFBKE@ZG$W3Jlr;=i<{D` z&;^li1sA_iC7;nOaumV1L-P-sPuXR}-@Uo7-S^tDq4C%6=i&EWd)o=Xx8j>@=<<>* z`nuuA7bEe0F~`MO=yH?{zW4<_sOrH2%Z+e|5*hAp^b3l0+?gRngW8dQUmc@=l>#Ko zKn}n|lfh3<<%^r|u7>W1EUm?DIuCQGGG2$g=WYqtf@@g`>ix*UEAI+T%SXm73bY90sp)2Dt_GK);%3*gy@z;ATa+s{5L zR~0_DezgpL@i-;Om*|RN5;KP-fzaU>i-mB2cie9}HRzWkI&;!3SiLh) zwSU2qc8RjiHV&(+f8~lwvDp2wWQMb_@0BfaP-Bx)YZ4>YEaFU`v+~;IqvV8d(RY|7aUK$?i?>8io(#%9+ zs_^?A7VVg-@yf%mJmrhQZVzUND*ysG zsq5LG82?iTil)%{)f^3g+SbPV;0I#B^O7HI)idAw(pcjjOwY^%K(nhXBKf_BFVe=p ze@ljIJBmVRD!e=|uk&jN)|^0$@RgOsr_ubB#6-<)Z3)Z+xD@lzd<}FvU5DFFM*f+g zm?uxd*o#jwmtX?x8NN-+oe0szYU9Gbf>ediE14T8V`*BlMnzELMOy{Y70&duwXKex zzS;UMTlZ%35n0A1GS4??i+M|JT+Bz+!1c<1sC`2Q9_y$pk!}S09EKQlx5yoh39Hx4 z#Z$L=3l@XYqIUR(}H=*)3HMr)gQj)nNx_TW3GwLxFPmp+&21@>ZRZ9tPWBCPl=st7{M<-v$p3xH zluwYQp^q&d)184|1wqA6u%_0@-=8id&_2YD_jw;2k%4a?4SU*NC#0s5(fFCK0{Pf8 zu;b1}-zfjJ%Un+auK<89zPZ3k%2r6NC342b_a099c_+Qx;sXd zc$WGD5KseZIHC?Rps;%z&};=z8Id;DP5(CW%WZco%G?;u{PRJFD3I?<%`8y*POU&P z8Hn*N5{#I4UzgQoTTo*eK4u^?JTc1CzCgw5qWOtb7>yAxLq~xQa0v4CQ9Lf|s`z#r`lH#yno!%1M>d^HX9{QTJ zH|$z^3N#$ZKaaQj!97S4<+rFWX}+6sBY0_H(J8qBP<+galyUR0`N6^uj5$4fjNlFD z=t+Dn=bcdaNwm0IQ>m;-~ZrnWELkS|o;xB7;K6U~dn>Je{+ zzP_Wr_;~V2`v%j|Yts6VlH)~D&L5VE3-0G#+lAZNw@1qQK9;f+lX|M1=gz8~XKeN| z^XVv%&OYzm%5jB9&vbXlU4FlyL*LZiX8s4%*q|~GZQpK>_4|)D3xcIN)Isl=#4ac! zm-m}tcNk7^Pu!T%@Fn07g4TB!iuXn7$(t4eP|0*_Gk~kY8HT_PmR9{kF!JB4qWbxW+|8y9`g*^1!k-)jg5(92j=DxthBlz+8d$ftGl@H1ardo^$D?zwQR=>{)k00JMKWJ<( zZ^2Xq`X1oSPVW}{M{B(SxJO}IdM{Y>Vws1R!$Xv_VPL5va`^uZ#L zc!jVf5lM)0-P$J`Q=UbMYen8!+MlF&$OgFOk1&p?DZAbJ1Mdj*Y~dOs)QXr}j}I1& zHsJh(?NSThrlTDLpeVCFC<0qKwMdb9eT>W5?I5`L#^+)Y(SKa#YvbO3vAh>66OpJX zrSJTq5G39EY%-Bp#?y`${${dGzDLUo9Zn_-|13C74neeD$Bm*O+Km6`7U?99>1^i|7LpPF7`T*mVYvxoBha2Q#)A zHMoV&%;-^>7^Z(G)dn8x;ZL8N!>(OWz0H~){RSK<@#52Wj-GVn(5Y>!nngpiM@7nF+dcgu|JrNI0>ug;0cd4M_0y5tj6M&%Y2w2Yor|s*D z)+Np&@L2*%H+V5u4~!k@l~Ol!J_e-l&U=jnP4I)Svz>Yf>-8F;1H=~`)@ zy7ZPvRI?5NldH4GO@v1`I;QT7abh{TsC|s!pb=0Ke8vZVF>j3 zzZGX3I{?IqJ63=fP3r#emr#8`%exkORE(MMUn~TYJAMS6!~r*Gpn6g6n(EiX7#)3fu5H#N4t+h^YJc)Rh$=+#ASS zqMnvYaS3Qk=ew-ke@Nv?xF3sSaRDyM&OLR5n7Br;O`ZM~G?j#1e8k4aIh;`Asr(CF zw(@dxkq~In)B=SO5+|i<1by$kBB_s!UvPNEeuJ8B{mx6&>bvy0y-Ge11x_emhHB#t zAA<7yBQ}i`XOz-&M#K3AYp2{2;bm)acN6kALt;|0VjVvLRJ3ydN&^hmeexj0?^uHw zt(G*5IScn&E-M@280wc#ft3rKbBam8fZ(x0EnpTEg(2GWDX}6^>oEtiQb>dR++^6| zLU`k;&DZJUSm}5c>3uqm?by;2KRZNCYVOGbOHgtt?AR!#3+nh>@A76HP#QkqiRY*+ z=5|Ih7RC@4)v2MEYQbC$#KXfwY9ND`gTDkkcd|pvN#MbA*Al}R;d7@p2ONg?YsJnQP&OsMPS+ z(83k8Qs%h36H4Ln^J?Qu417xP)XXCU3Sx|_0_Su6XpMu|4{YMW*&okXS5O>t+o>5Sa;l#@$6KK?j=hw`WFrpE3siEAe#`TbOcERrehN64U(8is7di;ZR`MR}6Xb zA+!!-ig8_M!HAF`n8a~J%Clt)?}^r!wF6K?TCT0=BJ#Wm_-NpoY`|?o!wPG?SyW;( z(dhdb;yRtZP4h;a$Q~A;-Fw!CDN;xoWOrrV!OO3m7?Lz$(H%!up=vkQTz+AZSqUD{ z3K?1&hpCjp(;q-(U{#v|TvxEPNe3Y)#mnT!wd?7qk5|wTnkpDWOJ_|A@7$(&rG=1; z#BT-+E3DqDWDdeNfVNqJgru0+H6mdfVyYEB#{qk+Nn6N^pN79CgtHithd=nezRJt> z_YY`c@yiM&d7>P2h5W-))CaU2oo8E8SyiKC|MZyT$=4EX7wVU&i-;floz+Ri^420Xl>eqV-8cwdA+?r& z&pzb}SReg45x&^_f0R1Z-(_SyXuRnvq0swPVvtq|A!Xbz>sN44jYQaza$HL$MVu*m z{K{(trKz>sh!nFY%4%{*?13V87hfc|B~TbUV>B3kj5 zRA@XCVztY(;pY0BcjTWJz`y)-|1kyNjV~zYw5erv@|HMunBlbVa-w#(zkcz=-Piu! 
zrGFe9f5ixp!O)+dYl z{ibVia@)%~eL9sizxPC;*vFJy?ZwGBLxljI70mWv})Hg&;{$X8oU@MsGEO zjkwX)<8vg#W{aR(^kfl#OLrQ9P~Vu+b;6|eWrR$FzLtj2pYr_=Qo?~ftXJqugY>wl~Q@MHIsjIlfEoG*R zAo}J}9ntQV;P3b<7uQr{hRwO2BH`Y$F z5kDGT7_LxR`yY?=7r;+HT0nvEvyf2QynvD?LW*G2H_BU4Uz;U7ejGJtVBaBLxw6~X z(*uw}yX$;G6EC}j)g#T`c=BtFLht(j7rb*HiK6Brpb{j#Jtf9ueDbd}{`;;$*#o-k zR@42kfYM2&N5x|6h47yg{1RcD=W<)AbloynDRrY3ae~70*2s$@>sxsctK#ROiVuGU zVHoohnwrkC8`SYhN=inLrE^kq>BqBL(@Vc2)4|z~|BS2OF`Z3)x;BtzGX_>zQUg!4 zcbZBj4N6zQ_dZ@&4}KUYjSLXa2N9AtL9btr@3{ zC=&?OR%J_zQaqsEu{-i!8R%#Pi__~s_EN0lofSUS_)B`=WQbNEsGs;Ol_+Ta$v793 z{>j{y55ZK1L1cuIhX6Tf<||&2A2+AFln7z@Jrb;Wv*{EmclhGFzqoO#6sivtQksP77e7kK!_{LR;NL&C`F9XEtt?AKgsR5$xf zOeusifR%C0bU3H)R^EoL^**BHF|^SsGnG65W^ys!AR%f8K19bs=^~!WbzUEE1(tc{&9`UH7(yp1p1?M` zk{v^kp;bj#h7m8$k&iW!kRuU*0tN*!p+=^mu{jXrX`EXS4U@`X)zkI@*EP%UBFSQ& z@}d;KnD3lnwUP_Ql=2t#+b%~$Bci%TM!WenpUOUI37l;el!VwfU0Pg!P!`TIHpunw z@9V>3lz(~ON982+Txt)m9MdZLkk5#@A%FQkW+TH1n-dX z$alEbQj=PeUG5G2h@V==*T%TrPFMi4*$DuTY0sT9>AtF+Cz@iyVME}hS$x3UN1jvO zaR%0zyyqj}5`hvqjpFaY_kSm5{4uXk?BU`~%)6zTqTM)V9ZGr&N&l zUigS3^FFE%Z8JXQL11(75g;0-g*;BaKg6(Yc>faDB1iGhEhkTbth6sil+v1ii)dCR zq`6T}{xy$*wm12CP-0gKPIQqOVFLin^xF48oDV+<{)uz=T^_IZ0%X$P-x*_r3Vow& zvYpEB98kYkS8xm@Bq>d~f|;eW>&L|7Ui4=j7PFuYgz@Af!OX{=&-;cb$3UlZ{F54G zlfOzY^NHiz{%yeFa{#-Hn&D0@ffvjDzOd_g)xzN`@Wg$@C|?%ys9Cg%U#1#+y>IS~ zhFV+$1s_R^NVxpdlIH(Ki{JO|LUfZa!X#)@4-pUd8ESmReuwXe-iwKM&~{^<(+b%rKX7% za?8=Y2V zh@d8Z?yDRD``YIrA)+E~AdF0xZd6mI_mrc7NA5@a()2xQ{!v%p zE+$%r3y=BbrynAM5Gk2@G{5lm0@(taiU){Gq{<<_cgGnG4B&$GgV$Ag0z@jMxk!9V zVKiGiG2YgH+j;*FBFXugsJ>chTM{l|*wTG7*Dr6}@jc-z0HOKEhJDt}*|fN`Z^aw! z*);<(y`6qbJjN#$?HLqRv_Ii~1vS5arNU>m0I`|57#>LF7nzonM0o6bT-~9unF<=u z;L=iLi_u0)4e~9cU4vQ6$T`n&Z&{bK*VulmA}e2;D*P_$E#!_4=Ay3mzk;ghU4c6; z3usSSX(-ujcgBzT#0IL2W*syeHTxXhp3f6XTo~C*Nha_^BA6D~Kr4Si)8UJ15hgKG zGaI9b!UUD}g&yAb@tr8@Agr!g|Apx+7lC+`9Tuk_#zpp5w?KKb@znT5DQ54juG#3$ z__k02A*lQpMH&dnJUEOENAtB_?L_7%rVu=Lqei55|JqZ@!^Od4?Bx@ZY9jWNm{icZ zA65L2E0sOOu@R5jhV`KGut`%W||HL&|r#7cFEx3kWZYNLt~Dd8~}CfSXwV($l7$ z@4e#21~y)2Z98W#!(8}Ba+Enj4w`j+y&ll1K2DIU$*|QI@pqzs{!OhY!s9=o+CNr; zmJcq7)#bp}Rt>8=+C&#rk~rE;UI<6cY;mInnr+whGomjLL`U|}Nm9P6d%RFb$rvj( z2#swlz*cgLX4Pp>zYF?!HDk0kHIn1|5+CoREag*MiPQ62Eo3)bznd>x>3IGJ^Yx$8 zUqE+*MMpy)w|&1ncA_))5%%05FMj%nvG-iVe7<%zQFQk7tURM+9LvQ;K`S8j$g0!r&zM?Fx|vZd|4B2?Te@O!dyPrOG-iD@H4I;Yyphj1DlBX zsA@6RMc+5Rz}~j%0bI=~5Pfk4y(qQFO#`{C*ChAL-fVsE?Av*RdzlI0^2WSl8#;6~ z{N7Eua;>ATZ@vYKAI%1@SAeEYg!#=KaNq3(LMaZkEOo6cY4{>i#+&o}E-W#$;Wis7 z1e9Qyp^G~ib$mqDPb7)=KTaq3R+vh=8Xw`^m8WTCsfJSrZ)miD#^#^c`$q8I^xG+n z!mT3e+gqxpV{h`9fMV@?tfiFM4ZY3w3o(`DT@dET)vZl4bc>(6JB zX37Azzck`>9OCwMbX;%e^s~Bdr=D&XMnpJ~;ObsPm-v2B^e?pArfq4q(S_aD?E!wI zR}8VN_x%&W%+Rm4waFKVrxj55$)YozZ6MA8q~&n?a71#jiT)BBR)y}&QqiD?LP&&a zt1pQvRUePIUgzdHiW=^$5nyx3I(>42Z$O|Y%-^*B9g9h={fisBNEDg+w{g{T|6tFs zv18U!ikuFg#x~Aw&Y)e4;M)(_{^L=VeyEz965w}F4BBcu=6Lc$W<2x-z@DfDLQb{LR zLT4)_a2NtZBICN1)(ONGw^yf?Arh@xq=YGXqmISAH*?t)4L3XX@|H^h zEUf=Q^OQD_ua15@yjk!jO&iRzXxunQVp4By?^QS=+cn2F2)$PKYDY0wFQ*XK=I660 zGN>QRcUPMKOJo2%D_YQX)inQ%0NbDlyzbp#i1WuEQxl&N7$v$iZ})6DdPiKI__vVU z6fZfPI^o`hilFxow$Y#DDmEe=RP*ynee)Y_W;Li&Xwt9z6c>3-4}(-cCS3AGRW7|kD<>6a&+Ha>hKK^IN*SR4fv zwK0G6*`5bVOZks{7p{Q^K*VnzmX0nnM5cwxXdNYQW{_a=!$B#AQe=+SueiYegr5;@ zYtuf`?%Rp6$^`b82C zVxN?(SVk4ncM0ls#U(B6#0eRK-XSjf;+tRJz84?y+K_OjzU)--&s7qmCV8ptMox$oSYiPQ}1MM;)_p z_C6?NEU;m8GB;tQ^B`(p-W9Wv;ys6&-HvMzeaJu|Wj1vrXN~IWM>+yFb}VHLybV{q z{r#uU%-@-@Q=fGQluEB@!2VTBLsRg^a2=GBGZlZRmnk;5JA22B)y2yx`|&(f8zTQo z8!bHrxq1QC)CRp5_p3xOyV)%5%oBme^VVVmdaJiTYBvBgxq2N@m z(q+m=LECQr4xV%lj?TV5(aoUBHhDAg*r6Co{g(LZNURoHywH<$(JScnPOaFa33nmp 
z(m^k%;_N9MTi_lyA0(gq1*1d&Wn;%q*EJ?SoFU2s59^uxs+IMSZGfTf;78D27#{=D z2+hkt8wvxuHQ^5jV;Zp%R7W&HwH%)cIBQkE>Cuh){7;tqkH!k)AvnHL%#43V0mmGu zSz6-PZC;TJZu40T7==vdQ8`g4%%z*7nkcsE6c&i7{|GFI_=rZzqd?RX3N@fd%J$5&LB@b{ZtTtfe!xqJh~j6g zqm$fGb3Rd>gm=D~ld;x`$xI4$EGQ;AIK|ME4i=>{9^e}AawZJoWfsezQwf2ecV8z* zr4C_T!C0XG_U+ruPKra5PP*F?GHQh1_l?6@_hFVLpgFcyX+o@~WH$SZ%D?};mmNch zPaEhe#AD!b?~vqBzqErXmB`w6XwiP|9p501>m)*&C&Kv8i$6#9K-m5d9r~wSs6c}w zBH;&^^;^HCqwoj~hfXir*G76gI7Z0MQw<7;I*x|!H-_IhI6mKt4@oXz$T7DLO}9!{ zn2d54E}PIP3V#4#Xt|hr+`ycWtAt;GuP)5;Ygn@-4=e0i#@ICL;;4Qq#UmPMP!xsA z+`2-sYVcZLMV}{7cV?1&NENVp+QAhpqilDj4a(zc8h*YEW_ z%dRV#f_W4yI%9ia}rSVS&$T4laOZjN5>PvI!Hgkg#EQF+t9($ex6 zVW$V%y~n6$2vIZKEC0N|B@3T-u7hzc2Xk>;S)#uJ9(dcCc&Gcm0y9ghDk{b}!!L{t zwSJ~5)?x+umGKD&b#;<7u=_vWKJ@>bSDdYJco=ypLB1!4;e`GZ5Jr>%bYA7YOs1?# zdRQb~TmrA}+OU)qNM=fmL?*#aBz;oCydTOHwb_!=nicoJcBB-;m-`Oak5i)uMqo>L?Own`GOIDbM zSEue}GOPYY{Y{l)Ry6S@cXxIp>#p?)`1fNIhNErb7 zn^bc&`G7i-#|Y4o#^Nx?=BA_F>iID(y1iqWJ=?8-GI9r+8F^~AG@=1@=&S!F5j^Oy zh#wJw#Q(&X??f|Am!)jjOkoZ)+H=^6)7j%PlMkwoj-0Z7kW0YbRvA;HiTf67`3z+f zW`D3zgHL2P*piswC{F{LgnWH#H7#j|5k_dNV@Q_Zw*wJ(QOpk~yzaLC_>5bIW4U*_ zneTI;nFyWMXY8-~2RY~o=+~li1i_5L5jX;#nrR}wO+!n!ch~#Xf-I70nQL(fVmcqMm(7J2rzj?t+h`THOmmAmp2*HQo-8e_C*O$6j=6w0n82S_n zIm!u+{leCV-YGH~74XRtewS_->410jSrBZ|MxyPMeo7bjRTF-xl|ilPx0}QDN3X(D zgTmXz*Yod#K$xtb^5a!}0JSW0WRF@X;;MFy-5@KI{tY`kbPNhs2tZ0FV9$E>Y9za( zfnXRh1DGL&EAilX56qTCtaEG(ifez$^b{Q1j-!)LBAZAh;d9KyEx?GWl|v~xb|DA6 zx98-rH05sT02p|*rd}6Ia}LN9{&@fgC~qEDz3tIfyva+(_{eqY-m6l5@DvU47Fa$}+n*K-r4B`~i{7dzY`EQ6;TN{ zlK~NHry4}uWduEV+0!*}aXH7eICb6-0x8^4IngczSJ0fk2yt zU&A{pIGTB$lj;-^{Zgk>j)K}U73{ZR$X-Yvk8;lb_+a8EkUYH{C$lOjs$w>w6jYM@ z0mib3$~l;M@0(GojAYsg+vWXAzeT`SU(?4DREJ{d&Tlvk`C*6*qsH>}OoR*Y+EEIq4|3z*ipm#k+)!?cvL}Do}ir_FJ)m zdC|Cwd%k5GE{?0L1sQE;axDnfAl_~Gar1akq?u`P^#P(??f#dm z+_!{K>#@bRieG<^4gyxw4!G=gVX%%Ow0ioB>aq6Xgv9ydh~Ep}nb_FyL#&B~y=sn! zBrqqjL`kXnCAL(E6t~Z4-tn~-#vq#z$58MiL{(k^so#q6hx_T)&C!C75~~ldD)^L9 z@`#~f0d`0my^nB>aSFC3pH!XSV{+ zDHkK`eKkQw2z5NZ1JV-CkxW&%o_D(2%iSOs4z}v1-cPC~IYXj$4O}NX#GtPbZ7TU3 zzMFoY`TCWXe?8UN^emWD+We>10-4vrk1;TwLe-ZC>Gk-4>$ewjf|A7VoUM={JljtD z9OHpyPdw8A*l^Ta>A0h+?~K~zZSm`(YjyL;CC0Py(R+t((C4+i*H?lXqdZA_%7n|b zZ$3K*c|zCN6rYf=4Qk5P_-F-OcdnjfZEvR;x2|{g^GQ6zyDoo5TmGq2z*LF`6pbc= z^V?c-dh<6dkIEt?jrCDE_>1}?vO^e4D@+Ze^4HL_C`7n(>8HnBh4X$xy`-m8ZkfN@ zqNyN(^3xg+8m{1IcGI2|lHp387tkUJNPw>#%=;DG;rZ?(|KZCxLFRecWEJh1XSh}u zjG*k>c>dw;0|MJ8m(S!G$;6YMgi!ri`to@LNoc+ zLh;+qX8_BFQ8>>Qg3@YH{>cH}fqu-Vi}EX3e5N_whCFB4Gh4sOdA? 
za9WN9M5a%Ui`wSk>Ky+y!;uv-yQB-p4> z@at%7`EF!fyL3N`&zk{PoJ?~rfH`m z8G0Y^1*7-9ZJD@ErHp%xEC?6Gj~?sCS9~gYj=3s_1bxHJL1F2;+!d4a%_s0mJiwq_ z*eS;}NREtAPwsP<(|PwtS$}>@MmLft@1~^-wLYRZ5)!cKz6?6Py*?-8v_CqHe475h zMycWgxLBx zaV5A;_g&0GQN*mEKyE0_M^hwqk(xtlt9dJKRwZ&3U3g*=HG(sPs2cw zD)Z9#6GS-$6#hfm$ef4E;ELE>Q}38-kKXrdn|Opw()1&7W#p9J|E^G9o}60>+=QS% zicNkGWEXGZ(A-u5%iJ#sp-x!lxv(vMTuDhc^b)#?A9h7a;9Ei__SxvZFVHPvd};Js zi2CI^h&c3t!x;a%6@O}uC3-(ccZ{RW2{g}^10QL!*Ux9{DnH-zM|N1pT%O4w6%H8v&2G%KNgG`Xa-s84G>>yIq{F|^|5yzPi` zF(wYR{!k4rKKI~?n5MCU&~el6}WmSGvD-1nuL<8ON^VojdT5P#0z|qH*cUC zwZS;80bjouu^2Sp)4~i`ij{XuD72}2{+ZJ0w^dOG`T zol8PPgCSCkUglAzqPNM^@WuTod6<(_;=Ngg2DZ%9Fp3M)ZiFbp&K*k+9Wt`-hie!A zRgt(md&3Pt6Vd%%2fdR8@CnNAJB}a1inm(nez#7I_Gd@_!yW%8S3H1%#(yleLz*}9$?9&;^MO-AGytOrwB?fvYZ1;}v zDlTONNagEdDfVH4Z^s({PopXN8Cx4jNE`vZL9nh^FmZF^^|VQ*A~N^>KLJaWE;47d zKVi>MmApXQL$~fjdb-6huJLW%sT9)gh?{7U53b6E(L26zok<;4b!{r|>os}Uo#fT| z)Rxv~Z_KcD1D$4{MxC$MQ%zch1@bab^)UOw*spzfvOT>C@gTL;^+OoU#Z6v6D(+H) ziw%n$=KrUk`*W0#@26?T?#mzNJB`<6r-*Mq8!Dt#-=8y45WdxTHC;2|XCZ$d=+Dh~MhjOb&Y|5a_8I;+3wVj~Hji|ZGy z5F0wm0iI5|M|7@AsmuNY6c{mFKr1XN>J1a?4#RR+v{n#?^5;sjLxD{MYGOiy&JG8{W0ygo=8Id~(z}+`hcXerQj(Gi`T8xJ8f^{*7aAu1 z>J(~qX(KnlzO7VSi*`vv6M~-bxgRC9p+PVKx$=f?!;wZG*lN5VyagOwZCjDZFhK)- z!;8eAapN}2^1@J*7EE&!9Pwo{OcVJ{;)QkL!4`y;(7||Gq+#!$$2)B-YW3<@=*FuW zQ;;uD7A!FbG{~(YOO&F5x0wO<+E9UKylMU;@A)*=$%rk6HE*?<1RoKLbTLrB$889~ zW7Oz2enuaqssEH;OI@8=cf&g=V_vTc49gG;xK^mCt7Q776uGgzVWb<f@>O!H zy-_K{${-}A)qvv2pvXjs=&07x(aDok{dW9LrQ1YHlmDSv5G`0#JC4OGE_FpZl6e(E z*o{eHi&yC!h>^L=2*=0kg?Rb>SNnW*Yxa8){8d)V@IB{6X10F$?`D`SMaU(vJeXxI4T-L%#l{-@}JAL9G3Wseppl#GqfUH?C- z&N3{@?(O=+FyPQB-7O_84T5xvbax0yN_QhEB}k))AV{ZlcPfp5w19NiySd~4+^-)! zJXEgPbL}(M`mKYj9e))gUhR7|O76Wa#0?8jaz69Mf9u%#@}KY9zn^avH&h*>PA~Y5 zr_)CZNnGe#tCBXR*@)GKxk7#dJctq=zynjo3z(cpvfBzx%uJ@23Pq|I;w!rj-}k?- zJWhCiOvvZR!D^;WqxKtKZ--DneheTgS3tJFZle}HTPKCW8vdYF<>7z@vM^*asr2pl zLx{u;)e=_7ncO+qa+rF(bijyt8~TmpohUO>g|E%{!aMrR#Bn+_8SXLmiH&z(Zxy#v z(DNUk0yiqk&hD~ED~9hlOXPC!V9{s%9OLGSYljh$asARwV#LdF&L~3xT%xE^F+F_# zN3>T}=_8WhC5|`tbaPRk4&2!Pcc=XI)&Wmpa|w2j5JL5;>!b9)9&5ANJp6#NYHp=4 z#SX8>mW`!DuuM>8Q=)B+AV4o`(_+>j`j#$VH=OC5uT^Tu<+ubP=5vilC*}M;<1;_) zM!WNpgx7u{0S3f$)N5;NmJy5ocTM!+gg=-PyAXbPuL@LrIwEe z#jt1_plTvBMBa=uL~=yq$U=B{E$s4uo0ji@@@P3iG+{Yrv?U$Z4TO@PMR(=TRV09T z^(N2$7skh-zh)+X*JGm)sEWvm`X*2#*4V`Kbf!imE@Z^jV5CNB{(IkzUcxFWUG6Uh z9bzE;R(T7-M?hEkkbeMc*HGi4jYd>BR8&z()Tg^*+nmw@aGi)UF*EaT_Q98fV;+OE z6@f7IpVQ*T>2-oqS5-R&5Ky-heeKzN7-L$_tf+EKc3~qC`XPxle?<*G-7$))8m8ia zz<_Rfo|6>gCns%0+ze?%sK`%$EGZcNVwp3ZT_kb5JR4aGf{&z%ZN7hoBa8{%?4%=+ zHWIDk?N_><+f<{!KFg;Lc&_DhhAz38je?Y*K0WlxHQT-_7fcmDf9}1vIobO?-H&qt zjud-W6hXHC91A+pzNs{iqLpUdbD_ccRVH6&lemop?=u82^U1AMG9>qYlC%&*!sPUwiX|X zwJQzRFH_p($x#tjF?tUR9SK|PJv2!@T7iizPua~I8`kfIQ+q*j{l~BS64H{OU9Iw4 z#KQ>`3G5>6`WnlIG!p>qvr=vE{FQ;5Q&V<M^!D*!6$A=aEq1hI19s%*quB^2P7uRdvk?e_K4`XAdd^El}}a z0dm`qmfG3W;|TEjEm3S`f8!pQS zyE90bpcfO6L50FC)(29Wmld~yIy+}bakAir^q&!PXj-4a-?-|!JlM#E$o4CLLBFc> z6qBP^=(+FWG%GE=JxfsoPMzsrlmFX!C5cPm-2*2sKIr?WG9GJu?*nCwDrMm2^vKxN zN%h1LdARs}5o$2)#9sSV9q4p0lv5oEsjcN_wThXnqw6bkfxKPy%O%_AWt? 
z!qEYKv!wC%a=X7Ag{->ppW68`f}fcEbXi+x;N7;--AjKC739=XjLMnUt39 zoXX`xf?3RJ#KQ7cz_^(;aSxk;iG|n`3Y}4l&tO5ksaK;DNYHxAp1JfeUM=v{y5|A) z5ldK$1(6pMo6>bCm_*maeN<#ELqznhx$*|(*gjZ#3{=@n;NE17U1;H)?{5p_t0-Sh z>(;!ib_&l$j`mlrT)%#);(KBs#Q!p0BWbEhG4Ss}!e2EG)%#X0BVT$83yX1(o>hI@a;*NoU~&v_0-WT` zMLNPTqVAb<5mEunklKBBxW#$ZW`TGMS#Ho|=zs-zBO-x=KHE{e>^Z5?r&UI39#yrJ znTD_lx&oSjk8Sb6zgpfD#(lUfr7l-7?#bs`YJZy*fE*_c()8HG#^J@& zkNI8;j{l#xp1-p82mDT zt@imQ!v6gnI^U-Ds=lhrJHI-&!@GO^Cj6J}&h(}vfG{ONWZquG^?A<+nnt^a4A z7KkYuuz0jRRr-X*J)2vI3faJ!7OonglEyH8^L{c}KEPQ=tyz~~;N=A|*NCEj(Xrxi z!fjusg|-@iwhp`%RYhH;(s3m5ivq)O#reC{A+(Gtl+do2RA~vm19iy0iPLSFKtQ!b+Z*|mbic^n zTl@t1Uh_{?K?5UNcK|bj3Y^@+<=sgdGwTBKU(25Ji3j3!jY(+n&pKs4 zC0&Nfc-oxD!cb(GpxpCX!*MGKhrTQx)r8B{S?lj6ZwjDPMCd62&?jstipIkZ{<8{N zUpbfkK=!i=8sW_64^xezs$PHDiti=mXxI@o0eShtW*=VK=s?k*=7ImI6twZl!g;N| zlb=%B1lWHCsXA)!1aCh}rMEle9Fo4E|$@;t@k=khHS2B*`^;`Lc)=l!JE}dh^ZC;H5SMR~yspjwRPqtBjrU zUKc+HiV)UXku;@L4(dOrJMVm{R6>%0y-?4tTxzgJA8a{qt#NCe&^yl@Mo9#Q0XxEe zQcfia3}g5aA7HYs9IP|xBbu(IZ_9jnc51YSpjL9VDG*J>N%O~c=O<-j!Is6)?MmeC z{hHeTy5Xk}SU^+5)y;v!g>%DDOYe+?-LP?bFzy0qTGy+KXRszM{}|PPkixmkTf+k7 z%$J-WiTul~tgI>r>bi>rYW{Oplf;#vqMs43MOVQ%FOb>XejHgRMVEc8!%Oy(FStuA zCtRa42K;#xm8CRy%P`3o*=R=U~9VbEXay+-)=B1sZwgMu9-Y0Jt5 z6IobLV5w6wu!t;FJ>|!#ac~WEAa^&LDXE0EaMjdl@R9nQ0kE5{-#=c(Pi= zZ{BmD7onK-+AdMjgFjT#YnF&0y*Ga=+<{&3OMBp=xZ{AunJ^hBE@Wh6a6EyxT!^ZL zVNp`t4jA34?P{;mxl!?8{sFhpTR%spL8pm{iH`dj;TFI~e2F1lNO8&HvXCDX0Ir?T z>UFY(lq)b$QLwqYy&1Ybm~2cSC2rntN6USG@?mi#Q|O5Yc+&6Oc4v8`9kbtx^1H(e zI>2A;ZR2{gd4qt690GFV9BbYJYQR@~d9(>X;DzGp$=i00nX{`@D&d^pZufjP<$jh6 ziJWIq5xa4Hl)$6)jnU)o7s;fjN}X?)S7?Bs4_JG?zX+4aRimr>Iw=;ixC~6Wjopz9S07%~U>mBeup}b|>lbQ@>RILLR zvvr3hP+y|en^B2H6rLUQP=+^y3wR0e3kXfF!FF2eH@WBP+vOtfPJ7+m97Y_hMF6M%ee_|3ejQbk7+I3Mkbx6;z=$sOh9_3}+LYrhs+#-;I==l|-< z932$#IvuZRlW-m2I7_&HOjp8s z!#yHA5ti`OnjSOCCvbndo4c47h&M^?qsr5QR*T#~D+&Wu7ES@?5uOFM5W`@zzD$3mEqC`tRSV=0E zoMP(^R9Fn8mgeC%vcV_T;w3SiY8Kq|Hh^5nJsgJ_hw2$$7D8%x(+^EOlv13Y`~2CV z*MU+^h-=Xr6SEXhi`)~Tht$|Pdf@)~5?oH^g;jS3!?o1AFWiDJv=6i=WEjH84YeGx zQG5ILK7WYiI=+4nU($qL=Ec)?>gwq{*(@&wfgPX z?|+~A`N&=?YO3qsv56`a2dZ=eS&FNcwThWOID~lId&8?FvO)8-;nKT!ck7;oCdpxF zi+R3mElhGRMlvq88S@UX|1xP*(hn>}MC$VHu10^pLxU%$xmT3H>E3GlB?zcX0K| zD-a^2ID*7P*bauTUw()sJv6Cl0jQX1m6G=AvHt8V!ue92z{#y0%J$#;fW%BviN(xs zeS+B~F(^HnEzX4z*}7rAf@^K%!=3iv3Pf|87gFNTDXhop$HLqkDmT zemZzVh`>~gh#Z6IAnKI9-+V=m+q|4z8)D^mXk$a#GyiMi=ok{A{A}u1aA>6=Z^GHa?tP;M(}ddi6OGB5B!5GyRL32+yqj zW!?K9_L(ty%XF|bKG=bquT!6Ze@NItmk_-+TDqvog^KNQ>X32Z?mTz_5EM_^t4BEI zVy74)mXYCU)4l6@8I3J|3UJz)_t0xyy-9mJObC!jWCh*Rx5AKI2c1mV4AIfH_d&(O zdaTC8&dzsaIZdejasYt)kTj1t=bVT4JJ&MU6ep@ye25h&sr^`WZzZW|u{n+$tGO%( ziFC`*y78_**Em&7mx#CVu$>Cr0$Kv8z@MxQmXfjD#+>SJNZXER;YCAlrwFs!yS*VP z*PK^5gb!vUJRGQDs@11t!E5(xcmT-|d2p88b;gUf$E|zgeUQHaYgZE}8?Sv?Q?e%_ zf}UE65acT~c}>Ot7{Sa;2MeJIZvZdIlO!j^v31(?+P5NEt?pkIfc^-s`#kBZy0%Ze z!hi7YsWA&3Z?$6MY+2#Uv?`((bZon{XEGmg-SXJhKExdzA$Y;k09ai-^_Ay>%$R-N z*>V$h&=g`a2j-vU)T#XdL`}MCnRL%?bzLr z+S9zS|DzO8|8=vzH?Km*LD}omGVI)^a|1l(jkS{`xkSePPLvgk`DxBBjJ*=yYCwqJ zZ%e>(du77K%N8kT{N*`f@oQ2(!OD$W2GV8A*&iaGxuxdBbfsL(Z#nx{bCnI{emh!_R+OF3k_uu3X0@%nRj&QDpFuW%se7~ zu}pSR4c2l^Fd~8ESSNF%7*0nP>KokxVm?L?QeVGZm}Ei=l$+tf&mrQf6mOKN25tKt zpO#V80@c{1uGv1M<5uG!#32j9%h0`t*SSsJ`?E|D^5{ zFe>6&)I#^L=-_QHx?DF1eM;Q~vTt`*%^4Z#ySB2^NH18e zo5!jPVx8)FXT2Em0QM~*ldt(r+cF9=Cf$_AzwfLSnx@%lznD+KhH-m z^sc&o3+Lb3sYH(I@&UF#QHfDYGjDjQJ8S1wt;vkRUZsg-$?t8vJQq{^B}upqt5+VQ zJT*sX4m_VW!q(_A&i}e2EYwHGmHbZ!>bIYsQ+6vSS)4bZAXmXVqtv`K{4wa%X|Yj9 zkNrmGx6^Q$AYn+>TTq47;%~zGhU_6vSPdPO{jT*uPdVZ%RWLw;o||S_{S4b| zcX||1Ktp%w0Vr>FIq}W8p%M>5I1_+#+LJH8J(6ts`Lc&bUP6!s*ekyUM{N4>$}MfQ 
z+Nwt}`Dz#kvR3bNlPrRG7Uh(2`)a8wod8A{cusvVihyMHQ zPD@S3PoJ)|_Ahn|zg$4mIP-IL2U>CxC1d;g>BXn#cjFE%y$L7VA=!$e6MAbcF@Lm4 zrQvmSH`51_ed&KxQpn(U-#&hn$Lhtnv^3WVC7zj-QMfJh%Fmzio%kV11yyk;QG{G% z+W8SywkA#lN;I_jS|i?bi`)HE#Hom-DGc_n`P6Ds`3=LOiC((z&DXOwyb`|3X4ApH zH0#h6LNt7yiCCyyn#5D!;^h^56=_vGSp-ZY)+H#L{3Vi9-wvhMhIF~k;^?1VJC>n( zxv|+~j=jeJ_dv)~8A9;|rCAVv*-wq1q+VC$qnXBpQ;GA+Y5D`YJn5cNUD|!?hOWfN zipNVWJQXG#;1sR!9@7+!p3a~eE5K)AMqS!X|U)_!T^oDdl&)i-tEoRqA zpx3HLJA1VfW+TMKrcXriUw zW}4S9)kMvHM>T1@ZUY3qpWh zYa4&#Hs?gBeLDnwrK9|XPWNl36y@~XNyRtw6YaJJT3U?UR>SxVkl2qL_(b?940}e* z)RQH8Ry5Q#p)CSvsQh1C)Ha5FsdnCxULe{%hB3=wL$?OSc7_?ZHgJ@TExM;a%f#&* zP2e_t{;JtevTa}LHU}o)iCZ^)M{#c#6*yI)ogH{fuJ+Y*k~oezMQYuWI2^p3GpSkp zvzHH&I42UN^`c&IS9ts5&;D{`93>pF>c~tRF13a7s76Lbg}&`K4?#9y-6$1OA$a`W z;Tg^KVpP1#9kZy#pJJv&Z4asE7xnCBL90J5qjQU|)se)f&PL0G1??M4Zu-en5+~&T zoreMWEn|H@hK6P|i6Fyq7orPPzLt#6P1hEV|0RyDmU6p^>~v6~?4)LK%Ue^+^x$ZM$z&MNEOA$!R^A-bbXA zMIX#4Q#CVP)rt1^la6*+K`uvf2hW*YuvQ@`gzk(%@&xw^DC!paV&_d+Hvt+wQ#Z)=c?k~eLTf9f5i zz=E5TA76VNWQugxlT2HQpFbXa49U+3gw2xwF)CnLm&wVCLy8Q`TxigQ8sMg8CRnn` z4RQFWW@zv{^$eSJvz}ug84rGookMPq*t|{pUhiX&r|$MPUC}Hf9E+|{wOvt3F(I^Y zt3uemxO_-&Cs#)NlNan+knRZ3^(?WXRFT|X?ds$De9|p5&MG#n#%;d~U9Ylv5hNP! z`eV(obgH_k-S*WZwuIV7`qRBZfj&_oA1`~C%GZsvR>QK&eOpB_ZC1DM&SVe*M=w0?91cy}kv4Iy+bGrl`9Cq7Ix((du zXh<_vNz&u9?4gC&`bRpv;nd2;H^U)q63c>#RWy->C*W3H5Z}zGTu+aWH#T#FEm(Ds z$!Q8jr$NnPiJkU#IVB$%i*tS#wM1P6yo;wMPU>|x64-Nw$?g6eWgwjPB}=L(pa%{B z;G*5Km` z=dUi9_zxL|S8fQME_STMY!_SXAo5S1>=_MID6SG=uYM_8Dm0}om2=Y~4>>g5`b{Rn z$AzL88t4xe2o#!t@#Q3JA(fqpT+}Ux>luB+!nXXtt&Oqtqqj{Lx#a z)?zRcv|g}X`K)!1SD|kkI7C(p`|7{*-^0O>Rv}4&?UVgl=f)r;qSUs3vt}Zom8rgn zo3kdCZNSad-J3B}X?)S5IazEVx9qP>C*p9FtV@cj($`$%Txj>Q)j=_d>{vq{>8Die zlzQKV8RL7Qfq`gxv#_kQ7qUnYqYUiFKhPlrIE?{EuV4JH!}v?2RD~x_=JcdE1cSjP zbZL&@RV^j@wDI+wBT{H4{v!dC3b90V97t-LbMOcw$$NDA!QcaPxvUa0@_*i#2`~@~a z|4{r>*-7%lTMHos0cz*Z=;BawJb!m-J+)2D4T)*m`_L^BOpwR5q0BEiCP z5^%BXE7D+zU$JTCWnp0{VB9VH^E`XxbxyxY+`a^9_;x1 zieyBG2Pt_8!geUe(;LNzn&EEr5IvwLR_=>1L*5_#)Fl%CZ_m&5B7*i?G}@=@ z6Z|xxPvNYx3L`opqueYLJ#x}^jXaY4D_CkO?k5-sS8)&J1D{Vvw7u_#p!UTRX=e$% z);DDB|L&Rp!Oi|+r-p(m;M-kwJ+VGOD#g5JH{g{vippT5OsJ9N2=@SQv%de^e&`P zJJpJ;xUh<5%Qj~ir4T~&8NsIIf8*}}T%t%A_NQ)wFg4@o>78|1DeOBaXv>KsktD6t(5pU1~1wOY8oy}S=#d4h(D3X3e9+g|qTU%m+( zHIUZY5IiGt@d8%!COKm_=c0P&LH@L~4AY(uR(x@1R-C4 zDlL*Qe9VUb<-Dp<&9y)K!5%_ENn0akAw=AnYa2LbxDJ+HI*X(-=9>M7KG&+sDBi{q z+NGcIEHl_f`7J#78|fV|s_rsJj5z28zW%C$W(t3_cC~od?ECphfd-Z+0I*a^fHS3Y z3kFON9@bC@dyM{-{UzZbx`=$b+*z?Vj4QZL_G+RbP1v}}zI|L|Q&KGCBSrK=0LOF# z;6%+C$yQWWx){?xu8QyBb{pk%5tjYRtT-G+*OM|KHfOSl&wwEv?HN}o=BT8S0Kh&! 
z@_s=CkOy1Mu$AZJyq2G0*M0L%Pq{npo^JB4Z7Sv7JAGqd$I*%Vt68rxPV6XcM0=I8 zSn$`1w=vSIsjU*&P}?POgztW1*&fzU#`Gr+n1Xlr1||xouRHr%_7^OI=*f>WeCdVn zM`jMO_fbeM`{5RSSbJns0E1%BVUC?}sT&yl4eS+&l@&ZN5YHaGx&w$`o{O}zFCZi& z9Y)ZGW^6I5?INe!pr}H^R({TInTqW1;Ff0}DiyH!LLH`xG7+%2EuspwV-loC&poKA z{Voxale6e=!uvHjj5m<-?!0Fm%Wof%n~||+)n&Xat62G~7Rsnc_(;K^fmCo}z5-#N zSBC{iQ^Jvx(Nl<8$ol0I488dfabUDCPof*UXs^7id38qa=RyRa&$o9|2pY&f8kq9jcgoCzO-S=lcE^S$Bi6orVIj?j zqXn>b6vHkZe^-zfuvslW5ZwcN%FLHgG&vgz`y*{5F*mb0V5N}`?r_3tF#R~z@~ zWgWm0x#~%{1gn{n@ zij7(*zD`88HxtF8y%0g$--k`?F~&i#FnsnkTtODc)2OpG`xFsyW{iN?JM3e5gAfsrQ7V1 z@9sQhBc;+r6}EhjypmG&9#)ueJ1-AYh)gUhB8Av+;jV4-_g`669U=#Q-BDkW!cKWN z=Z&u9GWqz!jb9V8YR996riHdVLF|Vho<)YA(H;?Us86P_=|uyddneG62Kh{4*^x{Q zha7h25kmgBh}|L&f(WTO6EaM4LAu7DjSe1I2~tb-^U~T>Pi|wCniY?3gQ=K`T*z;V zVGT58biQ`O+y${|59?&Uq^EyuM)EY(-R0$_6xUkvx=6hVk7_YvHrm8k3SKHEMtM&x z@qt*#>_Z+~=MTOLH8nw>@Z&>{s(NZ!$O!Fj3&yKjZ}_lyxG**>!>TrQnn-A86$lgu_b)ywNi5sqZ}W2bD zG}$f3bwv{e(j_?Fe$-SWVZT~qT$JFA7&{eLue+7IAl-^>wIa${EBNrAyFgQ; zw^(6~G)(U}7=5BbZCl1!DdGVq!e&m#FQdH+>>HJhEhFs5K|o}doZVPHjtUv5#}_qg zBv57~OvmdPtRJsPH~XZ(D{-!kZqj*)J^kLgBkRHTbL%;Sw&{wo@Nms_Y>v5h!>kUc zge|S+!-r`7={v@|Aq`rBQx-zS1EbO+kHD{m;Xs#Fec}Rs_2ot8tBnSiHEou?+DBMZ;3!_CZE8m$_bi+k#k6@EvP;msTy z<{rM0r)LlJ_f{WXy3LW;jXoGtjXtBgU03vI;P{^8>+4CW_BN8^>Gc&yF02oQPt-3? z7>v44se8~EwM7;R?oS~&Fq;>_3^{nk15snaS$E_bh)eL0U&?APTNeBsx;bctbC-QNw&N z$s*D?4TVb+@BwSlVM?-miPb66lCxSIYFdMct$SBg9J2 zv(P>e^8n*^f=d;)i82~(;i_1y7@l|7c#Pt_74nGdv1haw6#cu@m|t?M4z7pq5y2hW z-8AI!ynic4UN@h*-~fZSt6=C|Z0-PPkMCB{efZn{U*QV?T1I89Pw>OWT}krxd48`O9c=LZg0LSUmHc=*+AiD2X5ndu!+ zXN&i`kIT+ZxkwWPK9NTiE=?gIf}_xRVk>gW(dwtu`EU;R{`x^#X^+v(drfc+?nLO7(`kMx%~3uLp91f#U)<0>71 z-PGt`G!J}+m8G!iq%ORGE2`dF+C-i(%OkJbR8R+hN z>i!>zpLCDZy0y8H(-_5jvBj%`@*XDs*W&Q+>P{8f^c1q3&9qq_D)R8*!=|y1HJm{x z#iUOjUF8P}tN0m|;Ow%c1ZyPMaD4iPtwT$sldX~&#N!3!xlA|F(R(aYx-!yXd z12YU0VO_!b(4cVMX;Z&8kf_k?B0Hok42^Iw+-i?mz`bj{H{>qDwX&t6J#Q&n&VOM4 z=DThV0)JF{ytoovMO{^kwwI;VekI`(18vKsl?4N478w)u!xiQ0@k-;If`8c@e~kh{ zo?yPdIBb7DiC%k7Tb~?Zf_rYi5m+_bVhA<@) zPX31zUVx+EfX;eu1oE~Rm*98x!S!Z<5MvA@t9RAaEgDP}K=f?V>@7;(=m#LDir^o6 zuNH6L%Wub2(9Ar4jv80~Qt0jDmiNQ!+mRcGLa}d9*F^<%Ua;xq!lv8(a_q!uT(#U+ zewVILi$Fl$vDrN;w#Jqr_YQ>4Zq@h?pS*bzkni<6S( zKZurQ;-^AW$sZc()k(IYvvF4+Qvxp=F(xg+vfz0917pr4E-mHv%mZrNgSVuir##ar zKELp?%S0edJOGMuYfSky)*%4Ce`{PPzFwr;^jlxRVVA^qq2b5OTl5Wbhw_b!AmOJ! 
zi9&7)E1Hs+>ac7cE>0+q5+LnV@B8!z<1WQiNEFJ*3f_IRA#lTbLzx#3fY<*0T~zhf z#9=Ub7N0-mHje~49A663|2v77%o2CuHYEbiNbqx_OF|XTV?ur+OICGub)mqXUMh2; zm%8|?JYu02akr8x)L8)dJaJTmxW%zIpO9?=AaWYNZ(MkZt|;s5m+z+QV4ZiAw6t%KN(koipcJ=3FP z&hmpp`DL5C`S18V{ zQ}kYOa&y!AyL*7)MKsW@4AA9u*h!)uv=dKbRMJ7t5x4h8>G?Hy36 za2v2sahxlyJ$ci6?qp_hkcU)ZImBGQirbM=ewHNYho`UUlzUK`_re)2V7f`)-M&}U z%v7U=F#Gk@t0dYV$*K|3anEX=xY5H zZi|^z^t6O{RwwLkRsIELwckTOtJrX@%Rx|H&DvevISj$S-f^C;n7i0w%giMy0bJa^ zDoghiSADOo{B@9Rv$0rZllX1Vd*_D2`eY;nyS$u}i;vcT;OV*lSm?I-Y4MCSUDIyK zeuL3tHtG302|C)+r2cNNIfcTwtmUL0dGLjq>#A0QBkc!;158J&o<9dhSdeHBd2 zx#Tj@V^vBi*2mGUl(MD@{YVW%L~DXlC60{`z?iB&wAv~vHV!H%Ck`+wcX4&;2X2oJ z7}&UUgg`>SQ(iAUxB+aos~f;i4(iYKT5cHs5Zjl0pQ8@!r#JB$u>Bb-US287AyFC4 zVr}*>TG_UPX^F{mFP>dgi%vEFb~Xc%7K@J_Cntn?**vqRSPxzH_ch4yYg82^+t=vb z&JV3XQg~{XIWy%O{!+{wp3~niKhxsx|M|%E4x4%@*lo+9w8{4T5QmXTwSp$`SsY}$o74fFg=aKe?AioP+LFQOuuxsR zHwfW=Ff?)duFDUKNSqynlVt$!@@*<2S0l%GrQV~&Ge<1^H71s zF6!>9w0K;=n<8CLvptg^&2C8Y|BH&+q=nv-TCT!KU+vRUf4X~-eVIh^=oH7w9tfHP zf6^}j{x${%kJWI7ewgsL!+sVQs%y$BXyniQk0ltez=vBa6*18$-Ipkd78;lQX{qrM z{DWgP`~Fnj+!>dvlBhnObbK$96rof3HfC!TC@?`h9q%@woKE_4-+C+e?a0{{FIn)-ceJt_;-07a-}F)&FuLdlhVP zd>_{oEB&<`8qR{Sefq{Mq5*ULkpj33O;S{7S&%OiJ!x8s84OycI>=8?tmz7+LM-Vl z?aDi#Cz?jIx#YUPxw0kjBy2u*clGC0xz?Mv<9qtAy9zvE>&QqCOxnXd)|Z6-WN0fHvc}fhhV7MEAC`v!|!;yG^3Wg@1Xm(4MgESdgc{f-~V8%>oIQWofq3K*yEJyj6kb3#?uUnS(uQ}Dj)V+(li^+)AN`7 zolO&1<>cNa!@pE9n>F&%a*mM=A5GPtumzTaRER%|H-Fr{uriS>QOB{!__5v4f}i4m zQdMBHs)~p1%HR=UhJTj%P=-mE$lHK7d*bIU^;NyCrW-z>A+Dvr#sI{-^cuPpL-;vY z{sRq)B*;(Q*n}xljhPkBgzM_H}kYj;gfxA?+ynX$HvWb!fZ|E3in|FgSBWBxq z;CEAO@00PWg8r!gZ=86U)CyQror_C;7v$@b?g09iJuE}|mrW!--~+tw?)Iw0|9XXT z68J((8X6`;e2PN8dw*>CtDW#Jo!Sm!3`maffkgNDKvVK~hm^(x+)ce;s>JWU4etX2#Juv$|98&*`4XZ-p#U302q!G2S4c^nt95viLH%QDN+=x+ZEvKFpL%({${MlCNX#&8 z)-kkeL~;l)NH%=*x#TT!Bk6=ZH}dlf=?dH&B5@X zVadtyrj|C1cYfcj!u4dZdAXYl=@DkqD7*zp~Bv1oXM(N*E;x~kFB;~hv|CpxuNCQVfMkv7e^!Ns^OgL+fS;#_}$V{ zF7!$G-o9%I=sG*8b^IR+j$%Lh$s)DBrImrb;(uFJxPi_EJ^Ey^-ssIV;_D6f0?m1Bz93 zLAXjluE>3TR0+UZJBjB_DVdpU_ol&%KrA1so|S_T-#;_z=EpdGQ;HcJhU>#=Y_^)E z32Or>sg;%%YsXavOwI-lNrZZ(_!|Qh@Cg@ye1`G^Sk3Ff@6smT5$P2U2={-o7zxDe)jwrcAr zbpZd62@6CJ?RF2OK%GV!0+ea}d8N>JVgWur&EdZHj{wC_dVFzhG6+fJiS(y^e~1kHY& zVAFHh`w03MSB7uyzn=d0`wSL3U$B1m za)_9mMMwI$wu7SjYEHumc6VG;2So19hMsH5UyIHzIb-7ni0G~}(|9F;W1HS~s#w~( zH58|h=}b#2mGq5rpWa$m%@jd$b;gP+Q{V^lKWR7TO>)w)$#Ri$K$~Q>=MSEYmcA2` z&~9aHc)QdX=YNXuoEG@#X8WV)NRrsHp~BL?%;K%$xTAgYqF6~F!F3|!p$M4_R9K8b zC5`oqCVTkWof^n=l5?XA`N{g|a5ZI()B*6jOKWt0>%QN`wpSm<%nvFxGWlHpB2|*L zIh>GYq<{F3Yb1e_7{_Vr)t|j48#k>NaevE?|9O2M6C?D{r(pHE#%LmmXXj3a0aNQH zJ@hU7meTLsMTT~EUAYf`}$(tCI zB+;^J`l3l7R-^pTliDFa(Yb!s=H1ie%e}&VgZpzkEr>z4r@O76hhT&Vg>#IHATq81 zu&}W-3(#InShQ{Od?`&-JsldX)&OrgCTlD39Z+^&yp29sykO=x%dgAcf^s`p=6TUuDGAv2s|xVI`#KbW*izkf80;G8fv$IOE2?A=F8BBYb* z?#NkY0o}F472Y2SMp|qZ7$<5Lj2N$K61SvH)M7^$$`z|iM6RqWN>F@oCbD-WQ;!XK z99y1AwkCye1lwS<#(WqR({{>FbO#wr6%7GKgdR+*=_T8DmVV~&x8a|7I9w_kQoazxOZKa*2y` z&fd@7&-1CTtxcXbLG^0nyapD4ZYlTbeFx1e&9{1g8~_{>39VnaO^9E6vrEWND=Sl- zVZFvGJ(yBxZKM4`f8iwE*WZ|l<}u0vz+EYUqB4sLF2rGBEcSQ7{LjJy<6VnL!bT7T zJtz>gsYY(Lz(Ra2m9aLGCqSq%C9nr7^x+p0P4SrLWoQ_8!~D3sr2~!m6_*FA-gB=A zwxS?eWn7#nueP_G*dU(hg3^SkkH@4!mwmErTI%Cnw8EZ@=H&R+8lBhgKbQTb8_iG% zNz%I|W$$797(y${VI@Ot89ZhgCM6`R&CbE9G@8d;QwgT(l%7W2bai5%XjJp%VDRnn z((Xl^s4oQ42ca)zJ+Iy^F3ZWvuD=QvTG?sv2X z_q8xetT<6vwe@Q${Ni6zYH+HXu6AFi?Pgwe0O8gyfl2#740Xvzv}1Ui5*v2$w4UWm zUD>2n+rW4`HzOnC$IzxF1K@kW)bCMvI~r;KXW$Lq86z_km>QG=@h#;kj0^PvGo7sVL6**p$X3@fupQQc2{_#Odp#>C{rjCbUnN`Bt%>EDd zvc5Z%{Aa1j+d*p@&Byt)KYwHzfHb~hm$CtjW*s+~J4DS}e)1$P9a$hrS@QjMxNKK~g3%^rI 
zRQ4_UJCikRbPGe@M$mUmt}^Y6`OkMySe=v^CAl#+14X=ivBZ90`F(tjBC zq66i_0ewJ2>mdHhgmDuD3_FHtqxb%gMdAhz^hJg9&2f?3U8?Y8`izDJe@An#54(SF zZWwWJP^eY`XRP|l6~{-^!uoqcntTW`jBu>vphm9K(Htz3V=km+;ujW|Apc8B!$$k| z()(kJnKdCPYEeM?Hn!3NzW}mQb4EFxecV zW%$yE-r&4R8G#;r5elK`2?#<#W3k#cwkr6>Uqni3 z_6}}0?&JGD|3e)AZ^jrbOCC7&0b^v->gsA6Fx9inDX5oX~_m?7?77y<$_Z^4KL z0W8zE=;>>y{c7AI5yxv;f|lXD8FKm)3g;abx6&fS%Luc)$-272FZI_dP!lc%X0PaO zOQ-8(rd!vx7;*jR2P+q~8<-wY^!u%srQLPZqXyF0f!$5q zqd?io*|ZN6YEc}v0rxY(pzW31H-Yags>cZWyWA)nSXyWTp>#`CjJAUTm@ISHn<#{q z0se3Qv!#$m8)#_3M_G%FmQ>s8&=M*RObax=&Hr0+R4v?yVYoNSE7p8Ui2x>4JYuw) zo~oh}w%cw&U5 zuFFEImF9Gm?mN7ufwhTJ>HEiOTSe|NdGm`s07axmJ~GJoBoT>coA0bqI6qblzv~?6 zp9s7Fog^h`|7nGlz~mR;58rnDzV!CuUtDbb?}|h|j~&$fa_o)zw|Dk3?M~CeB}eWz z(v4Vx#{?Cofq}$b7elYpc;0BESPW;0aWL<XDt5G1v&NsaLm~vLWRxL6 zC(cr8KF^=mUXQ4`er@tB%8Gre{&Itzn%(y~^*==%cnJlLM()=CaYk~9z2(3!a9#iM|^*G zosyl+3Kl{g$!)n&(_?0`TUTDm-3C=08>wG&dF5qc@D?_keU?9vG(L4$w!Th;4ctt^ z?)fVe{M`^a8B5~FD5I&$wR=k(N#;9VH#b8$6IETr0hQXjN&q;#>y=_|@=aS8D<}K+cYd85xvF(|qPb~g2zvxG@?#|?SRagMavSO ztaW@7LHe)QB2dgJSJ60=5?;n9_h+WseDAub_8w}QxLB_kO0b!d;!&L+p;&$ShB zU)GN#7`*;_+~6uggCh$a!cIa016$Rz?`VU>LALb@V>JPYNXoLy33-4hd7fx}I@v6M zrXwS%?-sth#Vi6^h;S4n{b>~pw~?*D#!>Uy`(x?XR{@&lIhXz*S7sjR7J@5Wi5s>TD>;S^#$sOW>gYqZ$pW%3eon>V7gjXT3R{>?*?uq9rOG)=tOJ*xK56O zE^?O}@Ae7{nVU8WV@rX1MPv6I=8CVE`aFra??L`H2(!EMSV!mXs=-;~<%eEi^?!46 zasuP{!TVM0S@0?gL;vQRgt}B1*10?VufvrysUW9jWR29TFyq<}ShOYT$4|TrERX+C z6sNC-?OfvRsuaOjLA&XWh%lx7_UvIt{U@saT#m#m*lk$>{cBbLt6sJ%be?60mre^3bk2*LO)&qin7$g6zMmIe)PH|D(?)RaY z-yp1^(j}+It4nB{x;VkC*KHs%+m40~4R`UpS8QiBhj1y!o%cE&({%{h2(N+h=PEKK zH2sLCb@j@{1{0rF5HvCTA}lNsIjhH}nlm1aXuJc}c=3h^V936l`|$+h1%wabKr746 zN7gZ|wwj`bn0U4=dTCRN!`U8ma%|B5|IH@RkiiNA)<7w5OEla~pnL-BKcH%(o31tXqT8^ff zsT0%$CdX79+X)2QHZ?W%A`#x)klTxFwFmTo9p41(cT_+h zEV#N}N}>ggNP~1aVktqig(UnAS34!u>GWyB3?g?U;O6Wa*^Hj+L-mz(hf;S)YXf0O zbiBrlhwpVrKrG{RMNNtDm-zHOtOFkpYs!DT0OkoDDBl;CPn<$I9gD@9jX9b zDp7(gZpRzi6iOcAOre0Gf09!}P~lEO%=0WP**L2YdZhXM2{KyRm>ON6wfys@5Lht7 z;i0yMr;AisjeyucQphG#CkK0<_)sh*l^D+mN5H6(v2byxO*Zf0GNOg-!{bPUQ#`HP zse+J8YiM_V;)Dq#I8y!DYeh~J4Qq*4HgH3k2N<*fzQsM?Af15?=H3~e!)KFsuzm98 zWvF4$@asQ%vkuLFOaI@kd6^6caj7;y`|KU}2%Ga3P!rCq_h-_O511bU6Qo)PobpGh zZ9-fes`t9ka*O|m1ZE7f|E89MJy>>ab==$6PsS8i?nd5P;$A(al!y<x#(v_&Q zVzbx97TdJUzQV{qg#)~(ba-L4^ed0+?~sK<_cntA@JCu*?X46wBEquNnj(^zEKy`vTgjRrh0KM z;OEiWEsL_D?ITA66ccT;SzlJ_!Edy#rg?2fQHY3xna!mCX^6mW2kyp7I4JHQKJ3Xl zvKfLn3z?6>r%M42OqqZfC=391M_`RQ0Tef!CHfV^Sa0x@RG2gq+<$$+LY45u_5a3H zP~iuZ$FN_g#sK6R!JC)>+0@HxL8p@)x8qCh6_Y$)F_*kaQI6I}_TBb|bEM|gNu_-D zOiK7Hy(*v1p6$-JOr8K!J(`Snh9)Mwlrb{=*^9Q-%cmGnQChu>yy?+VMsP)`X#`~b z1x2&){39Ikw(hmg2ux?QtJ7T(92^`rAtCLVM&mc8-iA|iIm%+L3p@V6=?Kq6CE($M z2NnVB>wdsN#@k{eo1}5gS0h*2Vep>SW@+ifYC-Eqq&g%XL^RN36#W^m2)@gGkPY9R zG!L1NuM`U3cGY|GWZH8%a2R+pG%bbW_)9a?4_$>$@(T!zEXjm#JC_;O^sWIP`~{YU zFzfN%J*>bLn#BV#QPG*hVgevXSi4VQO8WioPCG}LP!gKAubQ+mkE-(kA}7uE@#LIU z?9JUt!&UvL9qQ|$14wc2V-~zItNgmk_6pG60aqnHf%bE&)puhem~>%xajL72+RP@a ze%LB&H;p)juQ@-r3YYNeqBOi?{VI4FznSMF>VJ39-_P6~3n$Yzv5cpenR$O-G^#Q0 zX2&jO63t6g^shFBo|Q;+X?`N}MHrnjZ`>&7c?&RV!h^ZDkeY$hP40lvuOc}wZ?TCl zRjL@R@oXWW44Ax%w0NS!Muui)W=`i1s!84q*h)&}`mIs9G@KDI)neU`kOpE5x>Ko> zoV$^|0A^?weiJyWlcFF^TeS(|$o&8pO>Lo?KbfE$}9pT6fABF|!h)2M0TZ zGx=zn<^WE>1Gd*a%juw^yU~pLE5*t`En%0UcsIdNQ{c7vy!Kz$d%;1h(dIp*NKO1v z`zV-3ERHX3Sq2Yz`Kwy665ErU9tPbT5M(<5Mjwld>>D4v+Tp+EvO&=H=rdZqB`8lo zL=&S$nm{FY17p%%MZ?BLV5mgD8YlPy8)|5jl|@PkI#Ku4uS5#fW+ZyI(B+QK=0urO z18>F&BnYdhwqn}ijkVng#w};6MiXk?Ij`+aZZ49l(f2P zbMb{kRv{4sX8{l70K(hjit>VIEch8+#yKp>92n{{1HEW95b<_4 zI^j>6c*b9VAuuIURTYsCyK>QixRMjg$db*|guk?|0nS|ZLU!7=wjX3(DkwOFy8X8O5mTuj?x4dg@%{Bw6-^Re)EFY@9 
z#KxqBPpP#ubw|H|N z0GrV6*MhKHj#B-ktk(R75DIt5=I^eq?Lu2$A~5(&tQfl#>$+{-&NcFm*LQ!aSq7E)(rn|^W|C;MFd@b=lCbvNyqP7VUryK9X^QVU(EW3d`+H}B z!JGh>_Z+MOSBz}xiEb15p!t6&8-n?0b1n@W92_c~??DR{;wYc=7N@{*UFi2{bry6U zROtcD;6oA;vSGqnW@Z%hQH*ZlAJ}wy2h){A5Qd1&@wJhsST#h}nb4D&*wAX2md+0aXnjeOX!qy%(7G zdXiw)pz?^%=3NZh0lj=0E%JZ-P@{rEis!Li*6{KHw;D1$>wjS)X<$e4k+CAhXY z5`H4+CzB6{*=wM!8*5Q#YAI7T1|LhhN0DHER${pf#?6dxN2 zkmaP$rrhzYH`ajTQJ^>8PNdC`n-G9bHVKr=U-_09C&SSd}5bZ{K z8?%+AXlJmSC5>Ou-s!atEvTW(x&ea{YgqQqa9_^I0pZu^-wiz@9V!`Ak+f|z^2_tZos@RD`#8iHc(qT8TiU7TS$cT zx);fkvNa)2%n`N>h!qBez4#UCq*kG?ng1r6@gK$IV7uiG9SWvM&QJU#Urx4l z*fy}-%dWb0em9E5)SoU@4bKwZ+Kpt1Y5G=tw=OvP10MmN^sn894QNPjG3jyUFzk%a z2@(mod8#9@P7E2Cay|MZEmJ6aC4Zh2S;Z8*-H?FV#(y`YS~(sHrc?wXr~XsHO&8f= zzvewE6kn5j%eOUe%J(92>ps_EzLhsC8~M#8x$9R_dT$}@cZjUw&pYtLJT}_jTk?q^ zoW~4N3ia|RT8XV}WCsQ;D~BKbMY5}6bRm(R)f4}nx<<2yMZ_dFFZ$cXaQOUqiimzA zeYGv-kwF)Ne^KCW}Eg%Zb7d9wNfgO_Cb9{pAISLMun{EOYX3y8O=rJ{dZ* zqoJ_Q-eIyh1&W`jHhFRC?_Oc>S-cYOmx+mQfCcFSuF(6_`M4{MD6#H}QID1z`B@CK z9t<(6%={Kb9{2jFB*pnB@#o@vr^_-Hm0?J1o!qJilk67L8}d7{f25Mr4wj2QqE;Ji zS8zAbr|M>tPvJfa$t4K-Qj&670p@wajnReo2v_p5m~bRIt-FF3e1E6uqnTma)yCYg z(=blD@h%a~3|N71*)>UB%Z>bspk2oa5NSo5LZ^7qn0!2mj~8+9@M&><_Kgz`HbHFO zr`OApy2!~8UnCj!wAjcy5A0kjZxVG2WKvYv0gSf%Fp~!RD2@+yZO{0@OK~J5tX|cxh}xP<}p_{)aPO8F+R71_5w!pwF$#OFs*#s;Lgmx16a4 zDzkq1My(8XKJo6>p0BK7;Fcm%ogh=ACkUwGwm;Jx_6Ks*zGE!PN-(dD3KA zs=vMS-Pd|R4|Vp%z}r_8J%3Ew4e)R>f&^cR;uO=0nwpxd$|q}l;D>q^6=q78%aC7NSvleh zN>|lZv*36BsNTS*n;MV>FSUZiTTs($QYW6;*i2z3xB{Eb0-grCFbhe{<$%GlvBy&Q zAxl^*z40}2V;*fSqY}rQp&s3@byPZYGb)m)ycoHysj1mrFVynN0UiL?>yK7d zDl_cDmm?(CA1>XD1Vp?djg0Wnz-h{r^!glxtGC52+C{DOwf7i|>`>4ckI$UA`^6T* zeTl*FCDY>4pu&z`{U#Or`}OA zqa89H7r{G<^S}7|t*~?PawiMBHtD|BSM`c#Id@HYDlK1or@MgAw@$>zP(UOU3`U@yO-N zP?A?s^Fp*S>FsQ8z>O{A0J`zLC2-w$XE~v=2rD;JKHVM8Rg0&WVep3htadBrw=-3`aH@t4736E65%SB;`cqBZxx*ooO?0Qg6?G?`7?U?@})N?J*!}S z#!pcL>ELx%gAbTF3@NwNr4gq)tbH0cdlSdtA_B?9V&C$%r=iC`wOeK1DyNzaKxK-G zbKxoM0@A-0+n8s@ zoUCzj*UJPZ#<*<}jr-(#EIc@WX1cfAe4f#XyTM)T0bfebwe~^5T}3oQbz=2>bc0pn z$^?qR=}2aIkm>iDL__ZCY+uYEQE$7&d$&4XvRNu|;y^>8%gU~1_HHVT$RI%*d1ZYX z=5^M`7$tFA5TuDT*^eq!RnlF1WTEh)GUH ztHKMzv9lVl2pQPXSKB9?TH`xyUFN%ECcu>ha5vT)=UVHZeYI`(qxSR08iJkR~pHRzrv+V!cLrmkZ={Eepz<_2sBJMhKX#b+nS;gpoe z5+28mHt!RK*zGig^Kp~PT%Bw5DvXkMkBhcNuns;@zeL9~qmhkaDzG#VxV5W)AMm#T zN4$X}9hmn;Z-Tdr)K(*pwu|;=S`)pag64^NE>22(Ye z;Fp@2$OE~nBDtGSS&E}z?1$BL;U?5?*Pp_+hHQvII;t)C1k<{{nVC!CGO&C}RbdFK z?OU;Yyr6|X!p6_)Ai8N=Y2Al!G&(zy5_STV6a8))UDpKjX@HLU+z>quPqsuDq}%hI zJ+;g-!oWm{zV#r5R+wOW8lGJNWQl=k{(E*|xh6i=e~Vrq8s%a~Y3bNhopqw$@e~$p zN%L#J&PCT7#ojYlht#daSI_Fsu}PP7@<)U*qK0?u)QyK~J20ry z5K-43WUY6Zvk$>8V&PmO-}fnccVny#F9_ zX+H?x`XWG@9`#``o<9weN*C*nH2yhS8xlc=c>BxSU}isaJ)Dk>!X}G+yqR8P+O*3c zdnM!pdCeWxvg^!k1|PRrKp|)6`Dh0%JNKjG%D2l7_J_bznRs34BPXh6u}=uP!~bNa zLVqo+4K_UkoNnKsob3Ya&`$S9%v3#}nz@e*(lo!(Y_rkPi}njzyz;&sJ|5vCe!rX( z;p6%Z@lukUE8Uu@qr%ZasEVhk1VhmuLK$tTFza}jn5>6-&GZla3J!i;S*S|lT9fyl zti|qJoj$N1;V(*_d-E!wz4%ZpyW=E&y!3IbH zRyUus(3z+9BDveBaM1QdS-oN?Q#cUv+G1>q+Wl0C7cG)+jv{10uH9fj~{zz zN5!UFs>98Vcfuq4EYLoksb+cEUr%M9oo=T8>!iXJgLF5QmN7PY$#SR-dXSy(;A$Lucz5zM;z|RU-5%LNPvlin5b`lJ;}u z`ju*)uD-tUtk0h48$K+7_FW6>#mSY;J@lrEtI5;9XkF0#(0&F>XQ#)1KtcfT12md( z{UC>y8#Rc8zw@#jUl(w0pLS7dL}QQK+kAjZjK41Eay{98dY7c&8Llq_r7nIg-M&MO z>4cpuI`%Kk97)?ktGsp|A3fi|lM?e2M%^To<&H4chCNLn9NxW=vh>O@E*QXy^h?Tx zX(^A^D1uhN{AV|k0*qYUh#R*)m(1)h zc1kGHJrPH%=1?DAFxB2FcktKDg|&$A-Q0s$^{DlQh#G{;kx-wOTf}P_zPh#!)7ty<)nSd%oIOXT5Ib7ch&@C0~ko23xs+ zDi^%cVSLqhCS58-IKv?;s{roFHowl@Q!*tKKwzK1O0Vq$f1}TLCH-p2-VkC6go?k6 zwM?ny+Fjfq;f9orgJPXrW6T%@e#wdJnKt+R21}YFY}F-l%=SDd~v5)Z2$( 
zS2c+fgzM7N%A|9!vDuzV+`r$mR`26O&66(B@7*hvGKqwcqTcwY3d|kPxOH&=;l88~ z*rU-5C@?N`EQ{TjLc>h}%?njLC*#qZO1-$8>{uu}$RkrzRo(GDEKm&9A{_F+4vM5u z;BSZ^yT`Ls<@9CkOyrIh!2ZKVn6@z;S+aw`q`hIvTmPX`L|sI-Y;bm;pu|Lu&%*TW zAZk=pRsl7Ua^Dtz!JnJ6#XLOxU|++p02Z_>%%d$5h>Oj8#17eesGuG~amY;xng|k; zmebP>cer=)*9$NxXV6PETg}=k3uhfT1hA)v(CE{v#^z@Xlk;9+!fv#R9M5HZ#UYyT zGp+m72foQhF1+y0UE00ev87_7$0%@vuDG(kH(JfV1HoizRA98zeJDJk%RFnJ*g^O` zx_fTcXB*vjBL`yo)Q{WITX<$E8_|$LpiY#D~tz4 zOL9bAEHH%i(kW#sv$oKuk6+@F)5ANmw1v3Rza&)cSeFX8>**Fod%^q*o)`TEWhh{^ z(bvXvKZe((NxQiz}*>uBL`o0_GAIUD~xomJN}e9)lEqm2sci8Y}F!FEOoZ zWvs0BH7Sqf^OUuE7;B(M4Njxz<8SPZ<-A zy!}+Ggf6s7NQUBu4Hvnes@sLSbpBFjEKaDa20pdnHza3wS_&qDobZ{7l7Rx`E7O8@ zM~C`L;bp0Dfus4(2G`eheAxT$w;8sE^*)vsd!29+YEwRD)LhpWY z&WqFI%^w)v$IaJPHM;5a){e!d!W=5)lTFoAa(}P!rK-K@L3$HiYD8+l}&J_-~-TCgX-6=lWu#`11*|c)&tL? zd@2~9cONe>9mM{C>(SiaXI(u#>@Fd%Cm&S$_#2m?v0Wj9J)u^e<7tO|4(Gu5t-hO8 zNFWRwLB}286F)~@?~z!oY$Adq!RCRvZ|`NJ3TRX?Fz;xUxOU4x`Qe06fK3%;p{nQi z_>8jPp+_&aJbd3Se~sss>UgZGW-YOLDGR&cI*1t$!4Xze8fA#;(Tok5-C}T&JQZaX z_QuAt6V{%BLoM;{Kny%zMx$`?s_uSxBm_x>zz{tQsaE%T-c#K+;(opDv(9AY`_M#8 zCe|6JYM6f(AMxa`8AzBJV^W^^&P=-}MI=J%_dH$8GQFu)%&g^)UGnGWw+YeEJfWLD z=BnA+PpP&GbA~B*J>TB-`lbu~NsL?saPL309dc`5**!t>#*2<(OHpGIzAm%$S?-1? zep_aPkIDza*EQAdglLZXp_`F!C;p73=qt%@eev?8Xfg%-XDg)5-L@7Fd3?i{*aW?{ zbd6|c7oNqLYA{u%7ifl7&0f%sz0eA+-PtdPTgwLniuv@oB3C$PC{UZ%yo0#qN+^m3 z0gUd7GG)eyi6^HArFSDHr882?y#1_gKhys6tdI=onb@NcgP&e}>7&~MMv61xP(~+( zvp*|+(?vNtVU9&wOY!yU*vrdY7FHoK!P)*{j-0}C=I31$JAx($;&(cZa`tN(ygjfP ze`;T~my03;KqQ`TB$=}PtlQbT9_}Mt_!>TS{=p52x{OhcpiEl$k zRn%&kQZV<|n=j^zR z-yTJ|RUKJU-iMdaMdilY>9n7exn@c;H1XIsE&k331}QQANcw=(=kS+ZM^K!U?YHVl zgOvubEE^2^?baKlb#9bR->FL^RPr5v7Y-FJO25rlBrecQOEG)dE7oq=5Z0_`4Eu1( zKUr#M&0t;3&uyVd=5MEUuDOre2a7Uev@<3o<~jp1|Jo`188IMi{KLPFdOsSIp2~ab z6H@ttG4eyW5sQ(+z*x3whQEUcmANE`Mbk#LC**_K3z<#!#f_ZDyHe%%$fP>cS*2 zICq`{ZZqs@Eb3p>qfuo!>He8_IzR{cl$Jd-Q)>^!E|Ep(nAzUgq9Ln4d@F~&7^BlU#)a&O!hGtbGxy;s2fKvY}<7Qq*pB(ZXq3MK&? 
zewCKp7-z`-dvf{ciDp+KBd<38nk`&-KD4ea%=`L5ilcBVLEk-VJUewHQjO>F#kJ-B zK1u)t1tF;Y2Z;k9yvBbO$@^`)cK3P1lOFR|Zx#4C#_Sqfh`-%ycz7USrTxLOpDyZoM5jT&v^ z%B-m{JX7bmdvH%|&JER*0=D+DReL3}-*1yD^aWA<2bn{dph<0L!O4_$B9&{$jcB)E zlf}F<-1t|VyZ0W?&vm4W2D>zbcaGw)Uo&{D{j`yR7a-&f{n;b|r#nLSGWf0-SM*9L zHZ?WEEV@_CAr@F@+^?D+ljHAj*7>jrD#8xbuE5!xLnBvzDy;ZXrugG#1)ul#nT`+7 zQ#*eDG{5GH3E6R7I6+)wHJGiqE<7*qa9B|f`JrZg^N8m0%_9Wud^2##qfQsP{TTAW zx*r}c8B|^A%u6$xIL~pV;pfULefmu|L}$+`X^j$d2bH{}De8zb%4-N7r>1Xlv>s}Z*Y zw=0r}I);uOL{2utMPA_L{wuqyl6b7k4$pd7vbX@N=%Xt=B%GRQ!p5|spron zV0wNPe0wT%=aEGV>FAHHmR+CkhWN9_`i++8^g7xV&4!7- z+*vl{P@{4A(Mq((7@h!${)q9Y2=b!N6;_;^n}2c@6WVdwIBT9|cUf}!>YUJ}Xsz;m zn=m}4z@PHRV#4ysqJUYKR)_VCpPy1(Cp~j7ilg!@36tb~*5K@WY(u0}ZCZsX6c2uG zTzn;~F6nyx|QcYf)K!DqN1Zn26{T+*PqBHbv6wFKg3dK!Im zN)7AgOls|IekX^K*X^>`vq$dk%NV2^*BY&Rf=e?w$U5`Z=dhprK2hb6cY>uf|61lfdbF@2rGy%}>=Qur-MB?2 z-NI?QmMAx{P@K%^L45*m$rY;+eX;!OF>qO%ggswt3TTs zSKAo(-mezw#ryGS@&z&RGiW67%p1aX(lIHt>UWW>?~|?rlORlJFgjvJLjUhCp2k(7 z(D2SyyQevyJUyvKNN7@5rPr0D)jlh^vXHJ3shIP5dgph#XF*0i;Une(_9ytZ{6;E7 zT4Q_;T`nJ3!`vTvdvV`ksTdK{*1^9@Z=-%(a(N=&jslp37S$ptbl0(=ccmlv9bdpCQ`O$HKfCj=gb?3rnh$cmr&8B4 zRfS`6%@y+mIzg=iG^pq?KDN=XitsM5344~Tg?}~U{VI7#^6;%dE|wZ^rd#n!$vlJw zeqG5e6O9F-(<7oxh6P^#>07uTduQwEz1(aUMdyX;p_ZpZk^wipitl(`^ZTUtO9abP zl;lL2G{DuE8+dexVIPqsN=X(OQ?P6mE%(k>b%*b0cv_2ikl#}iilZJxUFtk4D{I4> zjjs+;N&chTJ+~y$NJAe@b;0tCIf7Na=*?0_#x|ou@jN&99Gvo@sv!-6P>cMBhV$*u z%l>)(zpr6b1UkxoX7V*vIal0gvnhR#juX0Dw|z7di`aHtylLF!@z?1f6w@9voae zn^^VIod_XAxzCsl3$6Ld-m%$%xN&tAzZ!4vN|e|B?n3iZuG--T`B)v}X7fl!#KSiv zbR>LB!f6p}3@;+x`Xq~T10S#BiC%j|)~G0R@EQq@NUm*k+xGv8AnKB#PZWEVAkkUe zITfuMIqEvyoYS9B#%J6hxlC(*^%#v5<-R--r5RfBF{VnZ;e(Ze`^c?;a^J#<31Y;< zz}XTAXx;4~JKDVCVc*2&2`^O#&ls)hdKdxewLqD4SDlz40KMeen z)(+t{BQqcOzwADOFd9qeH)M?AGiC0h^)eWKkR+i?1)t24b2a-|=lchm zNzk14NoP#`9%{uSYSahLx;icT*kVD<+hQ&$U0r;amW14mMw)-i0Z{@ZC3LN8d$Ak+ zp)AYt%F{@e@?TuYogr9FXwVTXv#^g7CBer?d}|nVVVWxfEqLP#3;ojz&K*M$^l9ngS2<)ZTQTEejoJs9Qr=pyE%cQK^02ZeXUO zTJHPP)XIE>DA36>mifC$L7iWHm9m^8&w6^Z5X?tK= z9g0Bj@84u&b92>+;nG}O+D~bdm{e%Ji#=yPar};hBwz7{7og7650%dw+chwMrz5*P zIl*HWk2_Z8E92`u?`#^*lA7#-5QR9)0)d7ZJBy7~_tcbuLba4;Uf4Ls-)ivR+5m?s zLz>K$E1z?}l|y!pH4W|UW*QQ|Uz?GL+!sOl;Vrh522CzrTRM1Zj4##frh z5fPX#J^Zy3agm2`Pg0ldxT3t2dX5IAhPYW}BOO`lXU73)e$8d8IS-b!ZchDbY5X?b zR{F+qlBs`RZ}k|xEMj!&mw&Id%v04Om+nzHx3L%g_Vcmt`^H~{7k_Cskv0f0+TJ~_ zmL5ob?GQkG+QAh^)%d*d6`YYNKRQ$aTAw!`PO&&+(D<<4RY3S#h__NVp3JJXbzlFS zM5f2rl&BO5sqVdXLY6|qiJj>tbQKRIKcZ}5C&V`Hm!p*G`frdT|F0NZ6$v92^uPJzIbbfMe_< zh4xVsP|55@M`MH$FZ6y<)Pu|29RHX`a!_-g-K!1(gZ2-Hat$07E{)3=r##5Kh{>yy zk(K8OlE9tZUphh5Nbw$HKFSgZJY_n#3S+S9Fe;;g^3_F6?~a!^bA`}h1y8>E83MiU zLw9`3_j3!5N?!ym84hW#qgRu2KtmZqk7wF_5Ik4eO^Hg0x0LA{$(EXMKV?aAAXQqk zVyNo5;f6^JF3MdrT&kZam^0P3DyMG~qVw9XX}{FKc7M*0HU0svQz3+rhvB8ObJ46(CHNcRhVR+srIaADWQApI zB)2mzIdTr2-gT+_u!T3E-=-QkUedzpB6(R5!>|`*N`DZvDf4)nm|z$$I@DVWS_dZ_ z#Yh}I)uTi`?Q(m(ea@{vo%@=_gM|zw=MA%92JFDPrQsJfR73jDy(>Tt$bF1@f?+_J zxX;V&A?iAVy?`+C?XAU5yIn=BTQlFaEgXD%R|kLan+Lw!PExiYzEr8{qa5KLqh_0` z8Xwm-A^%a97d@!2#M$NeXX<80QOHQ04)_fO<*fE^YMu_IfSlv$)}^r@zX8RUVdAJT zg6D5BqJ&AG{M4DKn>2H*aSi_Y?%qyNPU?@se-5!YGPL6B4&Z#31AG%vO4KX9AwV;H zgz=V3=VKh%i)iQEHpj5&N44MODz$ILNG0E2IoK60w$bFbV${Vanjvnb5${6EBScW` zTjuU{=07|fCu24gJY=hVt*Yz!a^9?I?v<*9_Y|4Mv6Z54>RoY4`-)fc=S03sVVH`0 z$MIqdguM0CP{n8`c`_dgiU9L~-A4syA*QBn+Sf{m#9yfy8T9aS&~7dL|Wnzx!zgm@{=PQwvqr4I-XTm2Y8L ziUwaK&gMKnp{*4D-F;y~VWA;rg+fA>Iq(0u&kz>Cl8z8Sf#?2hVBq&#$8vXhw#AZm zd<_)(t}w62LSc`OWTTLzyuAIJWr7fUA7A=Q9vJTc59ZI4h-r3td*BfD)_Bz5%v_#%VN4J*d+n$v}6jWNWdBw0Z zsjuYl zw&lII7Rqmel1jXS}m442@JQ zA||J-B|+}`0}%6*f(pp$^FBF1|124T_JUNK`E^?Q?CMQL`KL5d)*cTML@)3>T>VMv 
zd&|JcLGPpfrCr_DM%@cPJInU_N*)dl3By(Bedk>>g~(8wr$e<>Qg8bHxyi=Kq9uJD z=^q@dou`Go2&N87)p`l)4YD?0#>0XC&X4g^kmr`7wlKChWcQl`g+n$Y=4;(#deMa?L?&3DRgzR4n@ z%#io)F%Ot9E z(pF7zRGc~d_$UERz4x9VLR@llbCYRkoQ|Tux-Fx^t-YtGN6*NJ+O`%MK7TQ{E<0Hg zxWkc;5}M;)cn$k-72C+Kod1Hu`R!7J`B9S?bdWfgxq?PXORGLk9ckgd)XXc#q`DcDO*Tu}sde{3rcTPtt zdh6n&M?l06e0vP=t72d>0i9Awu={XVu_*MUmoN0iBHe>-nsNBw?5$t*?~|kOST7@| zV!uuAwJua63#W228XV}6<_eS*laLt96sFgEyLbkTN?~q_D`uJ%2 zq#`W#+O90npwcq>7C296i}!z;{5`9G$Mk;&^C^63qE8?n=(C{-kmJ+3dua#W(@U>O zSqEgtef(Zh*VpRtOwGjS-SN!&1=lB3+p0m_i3I_oJ3zq>2bZ_xCNvq8AI^%de=^^N z2S&PsKZ>Gta826+yaMMOn2siXh@Fg&EW=d-=h#AXrYNtZb!(mTNFxF$ANUuRlyqIZ z4LyO|W5Q(K@m-N(N)=z7Z1jU7++{-$`PR7X22|cY@YeI2fycwA2i^0(YMsD}Hw!9a zsCSSvT%?g81dPsJmvwAtZg1;*}x_masXrSarAUW7GTpvu!ho&yKv(Nx{u8?#; zfjWbt_u0pm196v^p58NuMv$iCu$OYt-=Wp8Y3+^nbrHA zfq^6-SR)gkg2ID}7$aoi^enst3zq^LZv-;pCkr2~Se6{ll-Xb=PurllEmtNXeTN!C zncJBy*k?U0csyami_b)}!{(Q5zKlIo^Nm6-Ff{jiehzD=nhKoJ$A6E7d};dNh4v9G zRz|Ccf_qq~_IQi$av`%pm$IRT^FU$zsbnJzRudl1v-Cz6}xNVI$T|Mn{?Ab8! z)%m8Tgf_2^QUlwGa+G8W-oSi?eax8jF~z{(*KSV?zxZW^2yh|F!Q|wOH1VOV|9lki z^w4iXu5Z<%(i@i-x9^|yu1V?tpZ9B(5xh)9lQ_w%{D^ZPB2?rq`Ey?YSI~plBg(^i z=v|5Gnk4ZVJdbK5a-liUU(x?Cm^QBBQ&kZ^o~~)dk-KR1%}aq<_`B2y5{-W(CnYg? z%)6xoD~3^xJa8*bsrNqHwq={l00rIh_DxroXvE$`AD#J9_VZGV-;UB9UarK>!q)(o z!lZ*2&cmWR#SJiE*7*aEAod-%Zx!0RkKf}VdfhHo0W!C^R{qxu4gb%LfU6U-2YF+Yatz(G!{_nNDK8Z zm6iL+1#PkC$tga^Gq)#2m|K5FM z)aWhk^PczhG0MQ!^}^21?%Y&qSW#wdTjQ!31Z{XX9OU16bQ{+==7^{n`!<;ba`m{! zx|zsrImEQ0nRt1JpXNl1?z$AT62AKz5shHnM9NOp1$7Cx5?G{qI0Apki(^LT5mIv zBR{}p3aVt|bRGM*UzyZ*o$E9(%MQcVUkTp_M=r44Bgu+=q^JB^>#?mkV}{%!a`?MM z1}DtptKUVlUpCVAQLIZG@~Ay=kmiTL@0U+?(>U(Kx3K<9>?Q-7*_&e%S_0@X$~z3l z?&wGv-4&))2${i$E>_h~R_Zdy(yv~L?{1aQ%$XA)PEHCbdU=H|VN-t_&ua(R)?XM8 zf)LhQA4=Q*-!bnZK@7O+H)*JzbB;{8-8n6zF{q@?sI>UX$L3hE^xIw-1~*rx6|8|` zBQ@Kk)=XmRm6GOrZ@DW%TTu4Zxb-a!D}$rZ2_EoyN3kXStF%l# zFlVB|(G{RW!KT6(D)73prg?P4wlqXZ!+~3yLmaaFNo#Z`4<@TXQ8P+9QS1c$hlELw$nEUxJ z?hz%399KMa1(>;4TKfPSsXgkVd^$^{ZT!z64ML!E92A(GvOj@8ySWz`(U?8xMv1fc zpg;VJ>I`lA;|;JO8V~(`X_A4a?v2?+wFAhZo_f%y=8UT1J2DsdMe6Imi?ee``V~JU z@NKuk7eiRsS3_o_ArSt+%*kn!WqyBlTWuYfO%pmADM6fKNnRy!4r1&lw37YKRoOXt zBLB+*$da7z!lA?`y4m#z2!uhujiTVA$JBFAeTEJ`d{#nTfXol!8Gt`xvb6xY9LD5+ zF}61aj`rcpB5$cEJiYC|`h-IeOmSnZC|>zyn;v_5N}h_+ATGMS*9?QmEw_lhSqb_P z0mFCPcPAqD0)>dGZ3Y3QiVMy8HQ@Z=TBfvqoX&LEOHtBcZ20Y#lJkg zGmav?A`3B{P$k!HCHXP|12I)#0B41)1>jhD`2zDL-tqL~&%sF|!Mq}lrOCV z-Y$9wVS%RepUgoj{&PV_K5tF{9^3|}Z$1@~9%6n(4bElt^z#!*E6W99llg0$wdSRq zmmy)K2nwdsTeP84eV2L+N@z>g38D=QTbmIr=<*O;!eMkGn}LXlxjBwAZ7aFndDNI2 zHjRav^o!MF6`Cw23@4{^T_iSM`XMF7XJyN;y2Imu z3Y=EqOC3^-FQyEIr;Q zgF=ruU~=pYCFDjUDa#5tAv0mFcO|}xbhh-oWo}*MZx$)9w7ff|{ZMXI#K07CHCjlU zIUjfO+wS~Y*`#cy{{+vVoVqU{>}5-Tx<1C){Xy6tjeFZ*7q%)nr}6AdDc#)X>O>}e zh5!6~5gML#r~pPG93Xgw$ThAfkrAL2-kw&p<~j=j8M-Y&Grw#7~p zgO1_(VY%OudNW>>0h0T*v2k95SwN7NgRUYX;FQ_K)u^td=X2hW!vFWEOdm2H0u%0x zZ~jI?YU)-e4T6VK67oUw{~lukMlz29B3P$R9u%xaOgbY}lv^gKl9 zRn@;#I4_ZBm+lT=YBCl)R|n+rr2C{Faz0H;cP}=qth}n1mAI8#IUut7ElNb zis2obNpo4)Lg~Evj$ryVgFYF~hP4H?fTGL2JGlS-WV*}ID`aCQTw6Hsx8&bZiHsRt zy0@tAv!-*2Aum|yc4}tlC$l$tVE{SVoCEE=e?UMCZY@|6Pw;-Hk2e|z1dQ_zK{Yv& z{O1ndP~dqV#{>ZTQAK=gkIZxPaq2BU%i+wybO@i_48KfAAeHNgOq(y_Zlpq>w5!DZ z@u`j)?AQG)kML{8;djvuXe1ZoBDNq0i=|oygsbsP^I9(4`bMLWYAQLhQ2B=8VRF{^3rlY*ffyyrS6?gaZ5v*ePNCWtS5cD_}Q!s2>c9{Cs$+0Eic2tbk^&l?vi6jOjx~2S>2Lw z7n>3XjQqplYNGnN=z+!>rWFJz8gYx^y^QCC{~lg8D=2IWQLCocdnPUhyK5%eFV6$?rD zI8!ot47-EErZ=renjc+r|M-PP90c4QYjQdEMxINdIE;7FNUPa)fw6NV-foTaOW9nY zUtGp2S_AX+-8ZhQqiXy4I`w*>&rO5E#ZBHi_m#^FjXs025r1;nVpepiHu_EOolE-y z{+MSXDLg)X0T`UNPJ@Xw$d{#>QnR#2I*WJt$IU1Wum)d=w-B-!m#29F;mc3OR~FBt 
zjG6pDdvlh{u61dlw>ZWh%-M=WK2v$&HMR!k2fnQzn!72pa%6LrMu}*xxXV^7N)2LOKE(#_w07I zrm}snN)V#pDonq4x$~$9YSXBt`PR=+i=NWvyM?+| zNY`^k{XI3S)(U}z@v*!fpHsz|!q$ERQg>|iV}gWsf86!-E3=UqCEV6*Zf}?m9h!B} ztK8tGwzA^_PEVy@O+eo})se=X$5#8ro`9q5pX2*K(gHkW1lZT&w-9v12{s92FvAI- zpIq9xjH?Qb;>CqSMUYa}baaRlF)FNoCeu@po+M;=@PQ z+lTZ)QNM{nFcnlf44^{b91IPtK)-ZCoRdApiJDmhnY~N_QZQ^;wmQi&mQU ziiNd@c_Yz;c5mCN(jR>Gt)ZDdXCI}$)iRFnX}lNf>m=58)&lsZCEX~KT@XmS(pO@C z$dN@-XEfr6Dv@p9|AjH&mvNUMqQ>gX?a-1lD4-EFr;P4Nmq@x70UnQw_y?Py_u`ul zoO3g6eRtb5^bz_rkwulDDtXqvUUekj5HzbxxXW$uGBJLHi+b0EkMV>fNgmA{P$pMN zpMHh`fhWD9;lrQ;Kfo9>)`B$#T&9w|(Cys-HmnyaDO_%GDU;rZt!QgwD%{C{YdRG+ zS=FT*OrR?)zmnWnZ34d*a&mHUUO!0@31tC&55Z*J=VvtNS(0HS-yWE(XvoI-cUct} zg`Qbu%qb?%n!Dapi;&c)u1RhBzhaF|9Ru{KSZXHoJW$^?W#|5B^p4lL2mLpkD#MKDAND@_16zQ{k-{m&=C1i4J!*jo7@YX z!s@nVCJXufsnkkW=i-y-;guead+wwXdb$eBf=bQ5dKD|=28T9DuLV=zq!^B7HclP6{3 zM~3&w-8JoIOxEox>pAGy*aEI;WjK?41)9!noe_BWvO(MH$X(o@_oz|874@7^1mt}E zIzB!|-iz}66f6W^TAM4ubQ|Ox6Z00&Z~GxnG$A!$`)W5$!hlTa@lew^U^wo)yy)>o??kq*XQq|NGA@` z;Jl>m?!*qVHxE*_bMACLgB}guOzsC^TP<+?RoHK2nPrh=NFvf?2ELR zUM;L$iu+mKgx?hl8tR@izMs^)L?pA@bXr~xXTeAmJJu-1PuLE8JihMM63XrK56l8s z|HF6=KuX4@l1Tmf4@H1OTs`vHB_%`X3i-uNAHh2#0)s~gHs8!2%qZG&N?o?%+MdR* zKyjNdbZ@D1Zev}fwgLc=!o_98k&gZlT=$=>NY0ne+ZCNW_r&i38zkqo|B(T*stqtq@Z`U}vv9%vcXT zYEcOI7Vy&j)tY_1If{P0XI*dE-43ek$nepeXN$WZ^!bK# zeE_qPD4G{Z0}o`%v8e3Oe(eTiOZ0Qx_@G_ z71+FlkI)r(08nsUCM3AzGcy*hV}M8p$i;@$4Uv8cQogg-Q(D-u==^gWK_IuJHHjKQ z-JnfA3>)eRCbPOW|MH4Sl0(1CENGuLNqTQA5Ix8;Ok!CG8@Yj9D2S^7qe>b@Ao*KX z$0w)r;|hsC<<{fQs2nzSMj3-%La8aL&M+7G*rNf^j1gBeCG3-J9#wUoZ88x4?0;w2 z8fZ#7UHkq#?%w;1GQh0;x0C0g2TDP|jitsY-=!iZ!D@LB$<+KM!GN47;j*5O(+INS z8VCLJRnyiT=Nlb;n>eFrr53idH#fv@b}NlNHw-{aNKWS_qC7FWHR!_JX4`m&$>q3s zm*oTO&29&8v!I6+!$ESgg1M->AZZoKkb#qY{ETnFD)8};aJxd1bcsrMYy+bE+qXrH zAgkKuc(oUCC2+-E*t`oX-1vr{p@nVqCsbl#oT3XK&2wtxCnb{ng(w(X%p|P??*Zui zn4)E-&h7h%Ft5#@hjPKLaPA@fJ7s9#>;BpSltZA?pE)`@Ld4pI!`(No{iRzpn3u4| z%>j4Zwl=)75(CzRKXz_ujhWZ#wuQ4Jy?3|=u3BA0SaOcaTUWSZ+Wg|M-V^nM0FC?0 zR|$IspN__2m_@CP_wmqk<^EZnY|)8q*-j;I=K9_5Df{!9ax`b8E5~t2Z;w_8nayYT zMMO3{YfqO?&`u%x272+pF4phuKJ0$=wM2nq=?a?D^R$|45;tj=a4|ZSCWX$r{3wBd zlGRT5Mfe<~o?;vS(a3wdL$@}%k(@Zz*VM=oN~0@rWWqWz8R9yULU!N$bd|lE59R@% zaOhuE(sF-L$=w^)HfWR{$q%98k&q+dPRDKPO5C-08vSS^B*-}X5dDZ@b;eTal#bly zz#XYmeHN%Rq&@K<7nP)$Zv`7qKV{=$oTHxlV#aFE{5RHX&_gUrhtGWP+iCAj+k=qZR zH!@$p>+Fk2XqZ%vEcTEdJ;5NjfAN!_(wp|JVHAE~1niR&pBTH|I*V#t?FwtpM2);s z7;hlI^Z;%dC2HWKY%G0T2bfW`+jHzMkk}_Vc4AnFZUvS2ewD)^n`AeGWAlG7w#Y^N z;>Uj&=onc5Dx{Tc%(RV7NxdO7M6ox_&L03){Cm8=X z;R=TqKQoGcMK|xvr?;3xx{}?Z9)A}D1<=7pxRSA|#t15xjvoqAB1u!Sd5~?t)YFO1 z>+1?iZ|b{|vCvzHhiZY$Y7HE>=UT0PHvJ+&Hd7(l=ioD}HdP8dYCWvz~X@(5`Jy_D4 z=tM+Bo-&1&^}a2(y*ysU*SA;}vzdL37YjDQA6uMb z9UwxR?oQgCX|VLh5H!ZB?SO#4L{&88k!MrFH1QihZyKJC9h?kHK|#b+yaUK#BkF&g z@LEd`H74-;?HhGh$v^PlX>hExG!neelG>Sa@Z5s6xr@m^1g|sVi4{Xudd!nx^A$j56nL|*y>PtB9_?INsfea zQ*6JbDBH2xax2PcG}1Nm_JhJI)%mmo9l>IUq&#BM#}}5(+YMax7ep#Z>-!}BA>DM! 
zW^El9sQF$lmCp0#@fF|7^TEjTEc}Y@KY__9E&@l=CHBt86l3qg;bDzxJM|r~ZcH2> zNR4G|2*2bqf1=s#rEo2>w^c`@bTj9&2=fq3SOw2248j&rcbrM@{nj+Y+6J67bc~=t zlavgz7dPw>*=-%pzgpA3Vr=OwJjEx+xlz_yuWw>9GWI_Cr$nMvNe?QbHjw62{2*}; zIkoj~uHbjju}DoesWXr)Dv4`Uyxe(yUZ&2mlbEeB{EULTTRoGd`4$Z@9)T7!| z)M#+qTeMT%T3p}U0}*4+snQIs_kFWY(^J9VqHwDFhrZ+chqq{Z!!=|g;>@Zsa$^hT z&wXCrr{Rdgda0xqu6L{S&M!YXk%-MlC_-3>W7=?^KX~GkTknk6A^+sDwPTM6!nbev z8%?*$O&J0{9|2~kY-E(d3jv+3J@THge6>ubND_ILGl*x{8XzJ0H?lV1IhTjwM%91O zi1+Bo#{O3pI~!84+QHrtNFc#jWi;`PzUAO&3(s#CyVHztrr55E_E0zby1al$4Fqw6 z3`k68gj1bj&6lv^%Ar>{D3M{FO!n@Gs*qYl#ZctFCbn7-JN|H@A6MIh=|BqXD){kw z7Ma~Ww7dyVbfy*95qUnQUd-0IZ0LV+eZcVxjx6vYPe6G3)ILT0^)rC4O)8KQ@o)>l z&Z_Om5To+Nk}=uY6PY+&K2iw_sQ}fFWjNnSa`qj_^nUxOl{^W*1|&l!&v*c(GuzY}CaW}(J& zK}Zr3P)0~VeY_!Us9hCWfP7%azEwMavvco*8b$qE;O7fB)#$2URf;-p*gPMSf#jR% zk)89!HvY-Y>9{(pVB<0&FX5jL@NxW;d|;nh^$29qJEi`ntyTe1Q)Kx)V>i%?{`{-ZLT;p#vEB}o)r4M=lY5nD^r@@*(Ruco!j2cL@^Mm!$Gzz>^1NrvyOuK#}sF4PbZ z>YbQAFpxTiSn5}_gPWeneu67Gl1VDN3{56x_Hxi6N(*V;$#yJtaNzCp#P#O8EgZSe z@9GLVk+xaq~=Fly1ND9loi# zD)BL5)#HP}#){$&Q4Txh6d^NpzwyO#fAgGR^B0eEAzked9$eJ;q$(wM^)t7ue{tDO zX}BBRJx2UkF)bdVJwOY5#=$JYUof*x`$8w1u@3^g)>GdU2VtGV7ja7;`L$knnf*;3 zw_oq=y5u8pO~Ehv9QwoE%3=4hgb08#isT54>BH|YI{5v?zjGJp_`B=he=$Nj3%{pF zaTsPgD2f&jdGSiNA(4-rnQ09+Q&iE=0Ol?iANL+sjh4x=z{M?%77xe!$f(Ge1o#lg zuJxv;oPEph+Z~2rYCCJ5UZIndpfg1GD`ZCdCF4(xneoe(juyid0U>2Mx2Ih1Wy8lLZ4pLbMNYu??)KVH^?EAdv_+15G%vG)(9A%=;u+H<(I0P z|04XF=&mebgR~J!wa_ZSLSSG++|A7BT@XB%8fj0j#uIH1Z--Gc;zUkshy8|Xf#Tp< zQ)cFN%x}MYd}=_nm=mLBR5Rf5A!c}K$d=Hu6I(+pn;yd-7^s~BMD}Y-%LfgQpCQt6 z;C}il|L@%MSM|V!7*12clh4iAp!JLOAUVP4kJwtL!j*a-HSK-yee;(V%m6-d*aoZm zyGbd6d)cc1TYZJBu1(d<(~ywcu`tw|kW=d+;f9e|&d86dK4!>Pc7pK`5V4Gtx{XI( zR`{!n44EL;d{@Y{N|;xo=Akwb(3!Y#M0pchXwyQ$($sJ|rt-xb=Pwk?hRBGIz*61_ zItM~D_d#m%ClvAgXLhx%E-uQC1s!T*GATX z;wcJzhZWXHj2duJ!s{j9nb}`IU{$Ne3C3Af>LmXb0af>Mj6>+o)jYJoy`VifrD&~H41wIHh)}=&@eDC-nnoqzS{Ezfl02QV8LfE zT=vkx^LLTX&7GeM3`iFs?Yo1>{ktpw zGb%!mU%dRdKL6q}{@`>Hwj*{}8}6%>*)*JTpXDR=XDTy{`D7Z+_f5l5`oW*JpOO?H z)s*F@ZrA@B(qduL`QWw}GytnaLLvN@VHY~uK~kDe^`ZtDb(JN?U5h6VZbuY6a^J`oRs0X zW+7ktYJNx9YS9Nk(2Y0zr#A=|zJf^~>Caq7WYDhg{P4@Ts|}3Ns8S-on}#k+QAQm4 zb;;yf-7n1!jqlarI$MiWpN_T*&-T+#ClX9Gbl6gN4TOJm?VyJm2qbevA?5-SVz zVU75Ua3vKs*dNc8NZw2je-;Gl%tg`NP}Bl=V69hA_~>`rY?sqk^!S&0da3hibQ(Qr z_f1!W4yZ|Klib%wtyUyuYS~UBr73o1HOF=f4XLvdBotAH)mh=eRE(Q^7JWoekkxAN zXTG%Iayim?7i%W7TMUMCZga4oeZ-U0Y$s20X=>Gnxn_I;%Dh^8P|VRM+g!ZCMY3V$ zLhEJi=(INS6F#;{!qJ4NrovN`SDvJ=Q<9SOs)pL5Y#!D+>P(EOBSUgpc~L1r{T{(E zv!pw%dWgUKkHLRA1yOv@{9>kqHnBY+PEO7}9laG;>9D7& zINW>c*@V20*?T3K?;xz~q6&)lmE^9{I*X>3+K7w@y3d%Tsm1YSa6{fB%*r4K3s`uo zWlMJV`LhIuy=rIbdaWod!SN4Rif4@;_!L?PT14!;(d2Rlwn96fxcF)Uf1@?VI^JxBZ;B$-~QGNx=9}d;hAGPUXyI;LF_8a98&8 zYuugr*)RXDHPLu810IGu8|G1gf(r{AR`jNSOzvcE{w2;Gji%m9qtD=!@0$=W7A;QZ zBb=nfE=uYO1EARa+9}ckVdQxwtl_L5B%Z?lU1d)WcV%Wqe}9`pBSuu%{;2lJ!6vw3 zr#u_lN9f^{N=lgZ5Gbs)yA{eF7?!IFj!(H^f9#(oAORD>^-hukD)VLG7@p4v@tA@P z_{DG`wu}=!O-J%1xx@rHM$5hKl%kuWDas`K0K(jT8Z!-JvG zc+%nO8c@ah@o_*tQOtcWDtZk4Z79JGUAGN+}5x#PVi z4k#f1AT9E6z%6K}3sJ$VMKF9Ls?(_6FT`ARO}Nzh*6-84C-q}zlz1GRqqPR$Vdvs* zIDFUWd*$rH>wQDzU+yz}{nCD?|Fmb6@wJrj8~)4pKxr`U4VU z?x-^9swb0PYtV)u^G51MZviBNR1N>cXir2s@H#0O>LKwwNMl1( zxTW&m%vnDOIXD1B67j3wT0uGNxbgfXjJCR_5PW$dB}>b1wW$4leX9V-Ae`GM=pbd= zeIy?t%u}Z?@ua-Qye0rTP~E9WKj&gg_V6Li2JiXs4O*0KZ`7amS!0T6h>uiP&LLCM5N!I0-(@mGWW>$+M z9Q)SGTZp87Ny(v+@gYdUA!YAqFHaK@Tc}D$NzUi{w51wkW~x8!^XrV#F(!;KrxDzav3mR-2t>4r8bXz~+tRA|iFegE+W|=3? 
zCQS}P5n5Q(K=nhGEt}Aw?k*Y;$Bi4BTnK3uI|*^Fwnic!x;B@r{>S~$Pp$37@bD-M zi?Cy8g`F6#%H-wzY)!Lu6>uytt=2Bo<7n6CkFA_0ag>XhtRg_052K_H8wY!tD}& z={0!1#O3L*_59w%dn*s8tFq9l3FOk(<0h;0QLuH{QM6?9q|D73QYL##c+87Y^g)}! zooq`gMbRg1V{tRidDWYsGVIHQT3s0O6!}9Z8!HQ)$ByfmiCcz^0{f^g@)!Kl)RMbw zHkY*B$}WMGPH1Hgp6FsAl47|9*82vQ+kSZ0fH4$t8pUoEgy``SJuMg)2n+Co+1=0c zeMSkeA&E928)YyUSi|xL` z-1W7>_X4XF+bP}mq{XoVn5i~L`PI1}CFw>mNE^L!b90&94Wl|byL*v64MB07%(t(3 zdLDOo#VzX2TbL7X7ofK=3?q`(Q64J@g%367`cD?rL2PzoHrQH1;_( zLELYC`t>W=8?>&n1Rr&4Q?vzCBqS7%=MPnNcSluTiqF4O42)Q3AtaWT3pFha-wjpN z7mrau$eJom*{@Jjo>UB^7grTc`umgQVgh&Yhk}gM|K#AkYdF%jng?PYk|wt;_6)z% z(We{Xe1bJ)kNzOV)`rY{i_sq3B2B^;Gg3 z)yj-AVQ`|tdDQ?|T zD|i~o@tg#oo(oa^3i`?T#1xAWxH(!hSejhry$ipWR!2sv#P{UsMntM)#|B=A8rM~w z(jvfmFmUUR3VDC`iFP2=M_60QNu8~F&1Z#KF-4dPdT z^=Q6&6n+eAFLM4O?a;28G9lV^8NsfOxHr7}^|kk1zf=3yTh`-@c!wX-QtQLN`DT1BnXmWypo`?(eWz0c;9O-MnHF5Vwe&C_h4l)1g=)% z5WJbu-b@HXifC2G#W^M+G?!Jf8BNQ=P~kN!g)$82NO$sL)>NdfPS}QD<4SnsL<#-o zZMp>+P7ey%6TM`WhLL}7`$@3i=`X4Iyq83!YMh|7vM1hS6~VB1zdQP z2{>)YhH^<%ArZ^_O@)0UBhKc*F1o!qKnf#XL5NXA4vvSFJKBzWqK5wHZ2C0ysSl=c zjUu5T$;j*Y$Ur-Rkt4+K)LS)t%%f&!g#J(7U9L}L6e4x)y78VT?JNGo zH#JDEvVvQox>ggI-)=qX*F6w&pRU5GTMBWfM|%6^(l%o-$Dr~SHW<=x%x-@L^xC`E z78XuEEyA^TTl!#d_g2V$BU_)jqp-T*eYgh58ek)8M4t|Rj>s+Li?=ftfHgBa8^|~d zN!2^2uZzrCf=cOPj(lJe6_Bs+f#2W!0}I>ZUnjG1hC8d{xOve5)ShEA zx2mQ3Y*K@j!!S=!THxHu>`*k)pWgM}^sM&{j_sbQ+ifiU=Ts2M>+_ z3(#&R>z&m^bPV+n?ZP}^4oT=Cn~(|qm>84!2YUUddRJpZdd_H@WYHew?Z$E0<9C^; zflWjTh*SU_^0OvzHX?hD#Y{5I@M)NFHLT!$v)62%Cn9ds@04G1dMT&k_DfLv0u&J> zTLq($mJe*ioD=Rr+d=yrl&7-aXfxHxE)jL+9i$G>YPbgdR}r#DA8+0T6XO$iGGrxeV5lj#9;U5&lxPgIZg!@EnX2Xk-LYIsaz%BB`3nhU?fP6UM`6Lgb zbV`tU3+0vYh=!w+!*L?=_4uCt<}#^<6TX*5`{^cK=W~L8FnS_sYC;Nw44tltw`+aLB;w?DT&o$>T8Wdr|EQbg(GB6^ z#c+mv0XAK4o%i%P-#V{*=;;o%2RGacP;Qru=Vn#iI8IU?>JhvhFY!Kg&Cg9%KxeD} zQGz64TLx1rIiiW#Z;m|Wv?rL@@AbPVrEZc?+}v?%gjp}r<$IYtI5N#OB~fr`S()LJ zLcEnjkx0m!LBPTF6e6r2ix-|;iMz1*81FiHK8PAHsAq>ZWAkDZ?#T988hvdXrs?Hgb`seG8ZZrAApsrd5GhMsj(4zvqWqg;h6r3X{t%+kvTVYtN@ z-bN#sQA&%!;p98`pdh?gHSP>Vo7>Y>)`@5zRfhc^F}_AKA$uD~RDk~>K^Kob&ms;l zJ3l|CVAD6~M@h+(FMBq5=`KiB%~cJ^foeN`Spf50hS7#oL@ab1?pZv?4~hd%9Ex@utyBP;HHx${lXU6z}9o^_d z%%lG8LgmsME8*;lC=@E#$7%FK+)dqdNI2rYyU6mLL?+S;oN>fC2Zb*T znStFAsr<{-QoUue`54-mcv&w9crj zdj&tJk9|0!Nx&wuE>Y)Kc@Qcgwi>+6$8PgB$0BQPj!ZQO_n zsFgp&dlWIKFmMFqevv^6LL)dqBBCS6rWPSf=#KT^(UQQ3cdK&~1QQM(Dl&H-l?)L@-E9FwSew{uMG|2rV=!cN0|E75VU#45Cqq96RmWcNHGz0A$`+k<;GpQ zCPi6JN+J8*Jj=~+BYg({VccJ4%&vu~`7F`?Kt~piA{W@{^8!?U!Sh-E0MqkJOa+-V znk&%SQHgK8bQtg=57hvc;hpawF_7#C!FQP?*jwj%ivb~udb2984`S7;cK$Bs1 z=fa%{ISld)-4^S6O({t`-pIrPhrkcjS*1^kB~Nqd#SZ~8!NOmXrulm+ivj}tMd$iDSCd8-iT2%U+ygy z$0S%T0;b7)o8R=oL)dGo$urzQ#rH)&WRYVx)g(sz^p2e$y-IIaIwzajxwHH=g(20@;JuIZUIxi|8qtlSd~6_xMW47G!G zZaXLs@HnR;_eN9-8UGm-obdveB}tPcpB3%INp>TReiE*3`tciGX@%oLpmnyXrd&q( zGsSByyXfK{C2=Cz(;{p@l_TYqP+8G1p%xl1#mmyy3+dDdu&k-i88V0^y-4Wtwo%my z{fU4PfobzS`?8cb9$j(WhRvT17ju&T*Hij;^az-RQ6KZv;#XMJ6S)U^oWG~m1>Ikn z4nFllp%m0VXsGX#T;F7reM?xtJzokxbl@zXq6fZvT2i;oLAaF^7e}{#Wl z_>mrfIjehtmo@Z3)4sA;%6<5R#A4d;UTj>+8{udgBwqKpbsa1gk+>%U%Zf- za>}eX7Fzo$fF7*!Meu)-jgBl0aTRT-#=g0WR`NS&hWV#mEz)iW?E+5JlMJOj3^pS?Z{S%}>%XnPMiYK>;TG@Y+NKca?y{`|Sgop*V=K1on} z(M9quhnk-2G{Vv>6IqCN?|quB6pAyU6d|Mc{6%;_6gIP!Pj(~VvX+aKM~Pi#lBZb~ zJd}OM4(uciJ_md|pbo+_b>bSNKU9J6B>_SmSsdnQl*(VD>3NT?8fG^1FQ^9d;BYsKeSTFgTqR0xxZeFLK@N%3OL5*&B2RGy#_ z%UM{gRk+Q%Pyh6Z4V~UWvfnjC6xDiSI>317)Qak0ZCy)^9eb-p$SSHr@(A4_hnBtQO{P2;j zv)J?Un&IxJPt1!wNAOjX2;&FHlmar$$G(N0I>`^iE}*a;{gY8qb}sfr5xq*IBL>UC z*GcfY*g*_8?|YO+R0hjsQT)HRa2P^R<*=mK1#nn#ix?q!}E%Av95LKq<8+B#XKU&>wyOLy*gNHgi 
zdbqXejXcot2--4xTMy##88BQtctfPHAQzp9(Gs74Oy=Q2i#n)#;z(|U=8p?4Rj;OZ z1<&wR2gxwq@x4mq3doLr%tnSfCR;PmgRzZa8!S1=BBVv~S-_J9&WT14nt8^^A$6WbvncU?@UX^^S>Ed;*%HpAskCRd4p*Uw}L#`L6UVc#JC%k&u9WQlm_zdGm zmGlPbli=qEl@EzbC~9s;(P0$;t7hAU*TY%7eW$Xk*#&i*9!0Hd52%SM?^ z*LMxyqdc`g$+WA_QTZ0Vl|lUwf96_r3LDrNkM9Pv8HY}fG$?4EIL$Pncy&BIc=fPb zw7Cph=u8EF{Bx&PrQT=FT=d7iE3~1ntr=lcL}t&Wu0&w8L>VaAN4#D44|kq~^y5CF zHFF=9LcCk2&wHOtR%UsvzLRkvC3{xlr)=+~*v2KgvwTpdxp)$p1j+bxMz zM5Jr)u&T)eFjJ&(AGiY=&6?%CnQCm0W}^WokwK*5jLo6nAY=M*xBI1fTnvSLHpABJ2HO2E}=tDm_=9x_-aS z#?{Hw{#rb44x~G-?g!jp>xYa2HbLMgVuy{hxuvBC*SYl8VLiHEWyHJ9UOEvYsi^Am zB_XQo{%Px?3c!aR-5Q;)@-vTmt3S7&GZmb*vE2h8)5p#)r8z!sGQW5Z&z5+IW=)*K z$zeICJ;MK`#kimB<9qWj3q@#2WcapL-lb=bpG_|>*-3er0;j|bb~===DGm;)j<3#^ z8GmhmbS^b}wDBOK|G5G>GU2)PW2`X|eZ`OysAx=fb{aL|H$jzWnL?v{hk**DQc+nd zK}Flp8_L(_a45P)&9Vt?12O&ae)ojdo1D2P}unzHpRLonWTZxYnXX#N6aV9UV! zE};qym+)M4bNn2(pM79KRbDDSwkGQ(c85Xy(vx!*Z(Cac8mey$%nGE4){{bnFN(La zwG9t{6+5L7e~+k51Fu0K4$F3F;cZ2jzPwP=&&)cX_xzQlCn3@xwkpcn+}i3ht6Psw zaVUA&TwZg-5mZqW*)kwL*vVoM>}0A!e=(e;xBp?k0^ z7e2RMtb?WKX0?u!M9zx@Azd(i;ofyXna#|pVpJ*f$t%m@w8{4)31(^&+4$6x!$lhl zD%)y$dKrcxuh{_mc(Tyv&oY6HO}&$>@C49j^k;#s05Ijc>YgT`GNRL@4C~Z_8=t^& zN;OOQT0BIgM9kW{^Eh*zkbqziCS5B?M8|S36%veqWG38%qSkpF>-Ylz# z_lFCOcFFP%rYGW2y6;oIO>*d^*o-SEG<~`(tvfh+*i~I(?^WtK?s*5Ywg%tw$w^_N zx>|2m6S26&obq(PDy;IR9l}o1*03*tJZ}i}#m!NE)z8C?5JZkVJ&-~S982%sPgHq; zj^1H_6S=zoLDh`*OEm{5+2O)${sIfjf|cFErCrF@k3wgcPv(}0kYx6T`U$2F$brT> zn7==spFcMdFTK7F%csyA0R9gYg=KK^wxO-TvEMTtD4~tYd>U_iVGb5gyS^4QN|(SU0~kF5M02Zg;!{e zx^s4Wcvex;Btp1uy==cP16X9=+x!a%Frmf!~6m->-l40$Sf zqab^G$Hw$uP0d9b?^dp`zeOgZK+hVOEC0=sf^m%1zJ{U`zAJjaflqlL?r7=rLyZ~* zAy5)YD0hIZkAF1*lVbNu#VND1v|Ccm1Rn{v!jXw9;Ifx;2!=!}hmEqi0J!e?l`e^0 z%qQNotSr0MVzn-wRvIE57nlNWIOZ2;Sk~o6ETF(3blY3h$X5CVoweOK4v1J~1Pi

hH2Ok?%?DL+B#uE?H6pO;rM(|Yu}*-dB_5wV`@iWghz8HvNQ zwqG0!4{0D6Jl;-h4UAs_bpex`*NR5~`c{SjD2z?sf>FwPnSJ~U)`?XdWfB=5PT|Mb zj|sgWD0mhzw*nh+8vJ{oqL++Zy9^40^>|!(8nr#tNhxlEUyuJ5L7N6BHsZeng4G`% zSo+|V=&+K*IA8U$23*O9b1iqoa%&9W`dPejSU}!yCNDp>EH$5Wei$Sp3Fce^@jaei zbm$&R)9|tt$P1)A6?U*9W=Bo$OB816)Z>rkq>ukqJ|SwfQ;JggsT)_wH*_&a#S=)nvQYvyiH9 z(JhBlqs6|u3R?w_d^#Le%6`J9j?E@`C#|gXmPUP|dtOBM5naibHMA!#jJ&t;=_&MI z)k@ND;rUihpe!+H+e)J;tnWIvg125(Wo5I=^Y)wIt5?v_1OAq@FWK6;GhTw~L1R%N zT~%*2a&N}|C99^v3Ic41Or8J`M8UW&m=|!2(G+-AgM^fc`(}uj>99umi!%AM**wAF ze2%y~F;ym&Uqb+&==F%S08 zFGrA}lf6xfI@4n7=T*_zPFyRw3Cx>0S@T*t^uG|`#GTMx^x3NNyP0;QMifeW=SW*~l>$tI ztZ>vmeE}>vhy$w%wKFR6?8MK|FM=F~#4^bw;xC(7g@toI)I5?f-4IR8k;Zn2%g)TS z1!{o-E<~#@h)XM$I{gG(x!hJ|dND^0OK031FWES@PQJ2^(oAF0*x}%sn%R-ggn#Gxv9{06{Wv*-K-1T)+oNlT*8+#J?`AFDiI>A|gVI&VL_EfU-ApA;Lx6hhE;9*Z7M@wwxPr$ZeD&*x zZd58{1cVB@C-rcJh#2DFat~>G!Uhkq)q8LV+V;x%J?{P9gwKy@Y0g_2;;)yWeHMw4 zz{C0ym1ZzR4TpC`KlZRo-(}ihI`64Jw3ot^iMqV-LSPVzdIizX{y}Wx@CUI%mm@baI>Wv6UV3 z!Ggh(O`cC@R{M)#S}XDVA@9X4yi%jmP&lhkMR6&!2Wm^dtky^R-VT zl&_D1(&Aam@wD>g{Kt3WU1S0gW%8KdWs>+n#SFn+rAjU$d98fz^9}d0);aYdgu75BPfz=W?8ldcY$g>p%@x#z;#eR~~~yzXoS7TJ}T=@**Yq zQBN@j2^VTqPx=;Kr6-A05nw*Dwy1oaM%ptFyyo?z#w3j~M}FZhqINONM80R8QI&+D zd9T1PrupL6Zxvgk)djY9Qr|pwF~8ucWS222;fIsRJxOUj>xMpAq+WA2FZTXG$#GeX+_~5X3L@Fs%uP{hd;_-}6dQFX~wMMzuUGzGgJPkF3^~coILw5L$vfFmS4~ZDjy;N{` zR6%;U=>m@a#!F4hq_x`U0R|PZ-+V6))X`8YSn0%sqifBrT{FK|(yltPMykqf22_fM zFNCDRv#g?VnP<%$(U?31PfVR=<(~ZM%R_#DZ*{Zot0YlcMB0rQN)xq}MWf7VY#q#> zecFW8PRvQdphxiMDuvO!{9j06bb{1=((V1zk0WbxiaA+XR@CFBau0C&rl+!`A0^*B z)J4up%`Tp+x31J{WMMON!6V~BOMsxE1|gvnemY&gRKUBhGHB2XzY7`=Nt)ZWvpb=A z!YAu~@I#E8OyP3AI^t+osXKB}-%}Qo8BWT{PN__pPDl$vwZ(!C`y4JVxR5*NH6NOD zF1++w@IN%@j{=|<0rYF%E_TKdQa`8adl@B$)Ci-|S>z#!%ai+Gu0D#>mC0krNihH1 z7*ajK2XScIf3EHp&+XGi+DpcZj2FQTp98vPreeX#UQZ)Lcx=DFWzy_~03qpJ)&-T} z<}R;+y#>Koh!)jVV9;MZG5nB6*OD7O>)|q zU7f~a_+HL^INDGOB)^MsnG&12Df8wlU^hm*1q1VcbwP5B;}XEw=J`thg%MCbM z`P9gA+C*o`3Z=3>aYTem5PgjoNTW5HBNW}{dH9lL2)cHeMfXY)-kre-&{t(#U0vyo zedAfSTL2R~LAyndBWr2fF=Z~P5bQUX-@%3%6v^rmM9rwj9-hU;M3Sl+5T>cA#qb$ras71mR}Ws zNczcYz)2+$5r4Q0JTL?Vs8n0f?K;c7bSQILwHg?}nhdkxy+6cvf2MYC$up{!E#R(l zcTExTjRedNKETL483Yrq(}YH^^KqNCqq!{Tvjku}0k%&y;qPuPSiP?{-^R*6Yv}_A zA~{LrNq=_`!ah(Y){pqPTx%geU7^d_02q-Q0I0@7H;w)ErIQ($&mz%%`^<0(=n2Mv zB3l}lW>}WzI;|66p<O}&%{=J9d7}I?IMKs2kG5g~K zD!wPW7?3teRhp5EDjZDb^LEljH{+{NILJrQ(^FT?Tm*MKg*vl=xLcayaiOPJ5nz9i z1zn*(y51hnb{tN>3e{_M3uS~heLdtsbEIFz%)3-h8BKdQC^Xrq$jt3ECRQ$z3SD)o z{l|VOp>@B|?GYimeBI|6=nTl5etVAa`o&~vA>hKWRdT|2XX@)0=PuKm|kC(o^=f7rqcPHC; zx)A$*Pi}9>P~D>r-#GX~&;l%w0`-8GnCk?rmJ&{hAzhnYhaB&C)EC=4RQ+zwlA57w zS0aJz(Dg&b()k#E_uV4ev-tZ=M7fU_OTN_(@ZLOs)>jGmXec;>RHCsA$J1Bb$zq`m?vpt z!4!Dj8GkHnUKy^{1Tv&A_x3QLgb@v)C@;j)S_ghCmKu%|&xC?82P$k9virSZ%N=B!ZlFM8&cdg*zlMkdJrp~gwXt15sOS(H(MYz7m zKk0J#vVQKMi-k!?L}2246T-(1J_g5Wz~6&184H|rFL)5%ygoV%vG|&H1;iSyQqt1O z0meTxi^O$%MDqs)m>4_?){%!D7TU25R4AW2kocoN>@9naCCrv7m^- zJ{K29^ha(+Zh*D_-8aDo13{>~=RwG1*Ddxaw6^brnAo~pI=<@YS{vay@+6lXN<;OP z9(FbVzFr~@z1TWwt0Sa(={SY>to=xwlvyW_h99WUDpcrLPa$lr$WURyj!lA$B?yL` z**+hs%e=jW^3|SkJr&Y~o;=|MV%!MwZfFrw3J=zBO&gFfSf8GrYIgbiP2ozDRry@( zTDTo5Z3ehysFQ$)=7%TRv+TX>?jqJ9#?q4tyKD21hq@puWEk1zwo~V^Hqb)|j-=&XWqo zdU)`Gj_uV5+3!AlWsUhbOjuC!--}8Mt=?u@uJeWL64yBQV}3*sY>3|BA8|oqh}a8s zS_dA@&~X-K0H<$T=y&6>J7Y8xQOhq|98pV>lf}|N0~!5Ht9n}^7ZXrX^X1z&t*0la z8hhXEHRc;DHQPMwhtr;(Rz8clFcRo)k4BU+?!wcl_ljiq>u33WI1cBo%(gfcv078W><@E{N(S&An31)0YCfjg8)lNbeolK z%SMuX*8kT~Y814&{WK7~*=y++ySpLDD5wa-Low>6BWI(5n?8}XMhJ9Lke64_$oQu5 z@@19H>|-Nw=)?52E6NM0Vm;G!dvK)KMU7``Ejq0yMYL^cm4U#ye#1wiu2Yqo@a=$s 
z^L@HiN5|=l?w1<(1KphV3NJ!XG#nhNDDbrCK>4*g*4m5g&*e&lb^}U>!!9tCp@8}| z&A{#hSaqi0f(f|L|Lz3*qCqvbfeFcDcM|O?0JW^EaB>cfa3`E?cikwBAbe#TanVb< zH4(UstQfy?(0vk=qmUz=aTl0v4QsyBC6Ii|p0^ zXvdt{Wq+8Qs)SbVt>rr13SovIKjIeR$YTHJUL>SeYI?EiV1jt~ah9mWyB9Y!ZHbkC z55Zb+_vP`rBCl$`45|Vs4MHHu=x9?k;A;cduZ-gT)c-{B;OqL3;MmbhCD2g%Mn{^x zvTp40T(^dg!L$R>7s(LCq=NA)(=H)Q50Evh7$HiN_PRaM3zk9g_ z6(zjqP&xM2#M#UFMfW5maKlw*Xd?x1CkliwB>%Z7@I@gwq7;xvc%|qdJ+Fp=yf^@f zcugT3!Zu#*ennytNi_%vr}}3^;`lRFdmICt}irh|io2 z=A@>9Yh7^jAD=-`j-rL7aXK_qEyhu1_QPbXXnp)}BoNa(G?&}Ijx079;x_#SsT)rb zp2UU=>A8(Atl-UZ96S-pZh`@l0gXZmRupO26*W-TxNOS(20`r`5>5p1T0xi^GyN)V|UP9DVXGpquuFbI;Sn7;w;FstW ztqdGL{`cN!S8#qX@b7l`UX)HuWZRs6-w!h=!J7$E z|9ZFeO+WNi*<9PJ>RPz;(7!I_8LHdfcSQmMf}iHwK*wqq+-OX+w=>?~4@<=5-7PIt zxXq3aPH?JS_hjfcrVjF>NdK1 zcC_P&sY#Cm(xq%U%tZ)oSHNJJpAue0AF=pMO&L)A{T;XMYXAiA^pgspk)O^|iMFrFC0V%twM2%8Uk>(|&86+)l+}FhP z@((gz@ovwT;lc%0B%74zIt_buqdG83AmUv6JYcS$a~0GWIFi= zfWHX!e70PZ+4o?fMXUwnfB7N|3=CeZCsfVLp#VgRvggqH^Zf%C>39nA3jAW-P<|7f zU-%pg9E%uZ7hvX@^4BTosr)vJ!72p*C8C2u4CKt5Y7T%KuA;iQJsrjV{u|t!IQa%I zhkm`4$>=+7X2zzXqQ3Z7t5e||2{>M#T5YRZ-UlxL#XV{rhL63>Q>)p|=4*x^3{kBa z9ZY6G*&Eve1FhKKO>*q#>d}UtK6_TBvh9BXgdO`H_yNkL%}+P{o(Bf`2Qa3Lno-!) z=&6(uqt`|EXW~OpR>rG%;kVrB_r@JBC_i`skT25ccmkdQ&C)iI<_7zPb@~GN9EVWP z!%)JPrDG*Gi*3NcFn=NEv{a~3Mw17R2}ZP8qkO#7Xn|y>asUv$gcZF8K)-34bmeyE6&+`m_o0#%`u68^-*bXV7jt;@VGfSa9?5qD!P} z*+{I4Nal?nz$$(u*foDG){@_K!Nyvm-0pj@?0e~#lABAJa&y#+txYr<2!{~t4*nT5 zL@JCI!8+@h2!a&Du||1#@a}^7+7r5z$z`opGx}7&5y;1X@eI|3uz+3vTe;g6sTfa}+8KRYW*eol zd9wJZB#=hCY2-j|+nFb{PMQSwf({cAHp1Rl6^k@EP<_d(u0pr)=QUKi=I!AKKA`U# z1EY0EtO4>}zb;!)AHXibSyh~B7g|H|h7;#{@A&{7wyhQ2PA ztMsX*Ef3385{+D$7nvw`@>MC`w{H-aML&KFwvjRZtQYTTcv6Delxdwk2t9Kz^}h(w z3nC0pwro*go9B)E#2@``xUfL3q|}iqaP72FxFkfez0iW~d-n5e^H=t@)57B6%hIL3 z#aH6u;-Sa93)vU!DPl$74I?^>>u7!#WCu#~W5VE%7~jDDHi%|^wvBk@9j8$Vg&+Sq zs^4-xk;fL{!J@pPqI=)ZjvJxww(Wz-)q{3YrR6vX&xI2P+jCFnix%Jr9*mOQ37mEd z<@*H(ogT81y%}5fbvRh{KU`gOINVr`}Mh4_5ma#0<4nljsD{jq@&XmQrd6xC%t%-Pzr8rH^ z8+_uHrF&N<8@bDh-pQW%HQfDl<6&xYQq;F0S*1FcWcA8ty1I0~pQAp2r6584c!8!Q z0XmNJjY5|LugN7(Jg9-RcqjeCdz8b3Qwz4GSn}7OU~U_NcqBAgY4<=gsU_7p{{ge7 zzNf$!cjnHq@WM;iU9wG)k;MUC&#x~24D0-IMSfvSJqvd);(TB8#v>y%#gtz=#lsC8 zw6tu8`u4IrEpI!7-z>PWNnZ$#m-u2Fw1VgD0<6XP>NVmUC%@)B@f!>t3QuVB=r<}d z&+Wq(3vbj2l55}9=7Nv75v>Go0m97jofGf%DZwVT(` zZ8g-j)h?r(Z!GWdn60JNQPVAu?#|DkHBiUH(>#5sde2Z&{;15l++Hedyi7N3yjVBw zU7L0GENh9R^O?_kT9^|p<%7GEr5f_TRecKGQ)^^x3lpjE1XH0iKktoPTT>6aUsc_> z5>}9(AIto>Lt;GpYHY#Z{wHqzs4Oaetb@lt;O4)LNzBprj@Q}L$&_cwtFQ$~0sFnw z(J7^MHm^|u9vy!0^IJqQ^wgL@7d`v;)UNj z(a80zBJRP$WBGoQHTd=#0ry>Va0M zi&wI!jQE5{2ZL?Kt3;pg4pt4sNp*h+(48&k4$qf~>yW$d&ox|=U5V3b=z!~jWPmE+ z`z0_dekd$-it{~=AE+mEf@HYar%`Vqr#^=&o(+5Y%=b8YAFe{;40E^5N{;% z5hS_g&(K6a752gnCIwFQ(WXArPpd%uqA0IfISDet7zItnus{TkI*pPTOR0;B_N1No zU_6+aq7n>}j1D+|t8g=wbv=S&v)CHDCQC!TcKH#9LhqriJ$t`diz~WnE1B;wqx<;P ztA2tzSX#&6T!2Z{`&RpE&5%s;^+U;j!hG5>R8*>M?36Va5{iZW=plD7_1{Df_&+78 zSzCWRK#~H)ih9cq0h%TG25*k8u*F?P3Egb{Alv3m2prR5le9S8=hUiC-tDob*a-4v zAwEB-De6h>inYr2nN0M9ZRm$`7E-vTHB+D{mfK}wEl(>QdFkEYxQ<|DQ~-2rACfywf`(Kll~aRO(r;uOa}iabLn$@#dk$MDbS&+UrBUH zd!BF0v-Qt+Q;DJ*?X`q5x6`YuZ)blcrrMuv*#y#`ggL#Vt%0J6Ud3-#f@;H*lI)6h zV@j84`nx-kAusZv%JacjPDzaSoSd8*dNy+HN-3w0!L|Q+MX19^6SAJmrZXT+zOX8J zmE+x?()&28?w|Zd)DIVAXQE}Rebg1zBcyExkzz)*b_0*huHpo3m$oo%niwgJF+^8h zy)`NNs7&*|A0@R*>jpZSt%U)XiP*zuA>gNBVm|Q zP{A<7s4|pqpOQpl@OBnj%#ImyN4T(sy#i@(7vEz-3hq5CAra$|pSx=|3$-)D@ac>c z0I|;Q*vAe6gx)P2*p~eJqLNujl%`;(LsU63I1zFp5KTbF{`*h6(ZHj0%SLG!kq<;I z#1ZS#qg?Pp|5_Rx-sl~hrM5iq6t%Ihm z=YG9qg`qw9T&<$&uyPtccxqN>h**^VGCc*7(RZLOlDxnsgPz3*{m6v~cht{>VFwD` 
zV=DJxw5izb$|gDhk`|~nt-SSD8&qF7d6{73oj+;;lS;~ev`vrp8vmb%&sU09BiN-r zh^@fv`ZAfBm%+3C@90g&C>CQ}+kAk!GOo9H(xt$a^Z>~WCQ^;*6cM-F8kYrm-qGtf zEE+J>5!D7Nf#WduqQ>k9Vq3#lPPNf;7vW8e5Yaxnc646W2;gkm4jnDjqk8pBCi4{It-5v$(qEHkx>I3zCU50hf=#Y!EC8%tN7Z(S8`@iHV|nbpzM$u zB+YtxTjanqA|f^aLn?8ZkYdW>OaS^knhqN@<~!k(tq*TELZXLnXAfG zc%z-p94nw$EveXcIb7%E!k@19-_eO8(bsjX{w{ye=>O7ug(KEq&b7*}baaSf-n+=L z1B$i!D&WSt;wiloNNCHR4n&(_($1BHglLpE%zFy7eiBYaCc7T?o)IGHu=eyI=b^}j z-J{o{Smu>=qd!RL1-Kk{=1QK3VYBOQ*_v4r?yq!>#KW){?>&Vlqs*Zo$@>CZ0iYHi3u z3$P;`B+m~t+1O?oHsW6C$)twF@NjZx9Z?Y!!wnw87r+6f2GO8N0^;Bd9jV)+qI4@E zg*K0PT0Gg<_9CFP_gvo4>B){MSjjXD9Y%JYM`T|d{PqeyA_8b(G71Y!%)zu4Kl!>L z13ZvhyfEu%GasDBAB-s0-nv-yU?J8EN<84H;V#maQsY^CQ?0LEk{|r>GsQ4Jb4Eu# zV>=SBi@sqeS%1mln<)`cukHhs5{hH_(R#Zj>sT7kF87afjrM{UsqA3~qQ-mzOCIx^Qz$HarX9jLz zYRYyaWSu$ktoBKbqdc{u5PYmen6k7)+wo*?DU$i)r%!xQlBXY|)sL%s+fFlVlCrbD zrRmpuI22yH8)f6ond(sLx}=ola}sH&NOie_oA7tw(7F#qyp=0XQPmg^3y?3@MVJPfnqoq-pd6v4o*wY4^q1^0F zL9LG>qF6`|0ZT2v2M&X7q8i3EnVHw>)s8|v-df+hK4Y#vtDkVP>ARMb5Z**zZPU$rT{aRCh9u%ga7LEjGp2vuHPQGBzik-W zUzqWdn7c@t_xcS8WD6eLGO1X~JEg5-Xdy-(lY~7#R)X!Sm(|>Ue*WOBaxCt?gV)f& zX~aY&uC#HP&>Vtc?)YWz=pBDR6=7k)S&)K!(vN+ADpYsse)6kBp zUQLYv1;W&heoSZ#!onf!M`~|zf3uHGJ%CLdAvXs+emOCIi_W;6)q$*@pWniI01E|u z!|r#6O@#nOVyb(q86k{@&?CTf<&hgHBF^@@9NqFuEXZ7UC3Gh@QFR6 z0#bngcv7wUDe0cmOx*}I!V2ej>~wQus>bOUoX}}oEKZ$9qkcpnA`Eb=+%maioz2!v zaK11yF@7n+^U#^`_bjXW8mKhz`%>{+^5#{+A4@vH`BZ0x%$2igB(2Xw(Ni505D+&B(ZKWAh#=jQLa zt=1g5367nE_{HAH?@)UmR9;0itg;v>t7P_%;lWLS=kWEL$$c;c$PbGA72ED#{SI>3 z_S|DHOGNq&ePsdsU+KXyM;!x^??L?4{2n9~#CF40roV?_8EnJ=e!4hk$#vNkVOj=3 zfMEQRv0IN|akxUE^-Ie~_#mb4x4}1m-gGM!tGFA-nLWT)dazwfVfLAh4EtPtz4MPR z@)L_ID>@46w1$kMn5j3`uD3xC_-(ktdUcGvz6~G%F_<7{Dl{hGb4z0}*&isIG=x%NO@XWemL|7< z3GM;LD3UqTJimyau4YvkMTzgd^4e)3jNoCvVMB_^L^kaM@kyxf&7Jf<`{F8I3Y(l| zL>QJmm76cD=)KTcfuE8kxsC~7>2I;NEbcwHB=Xhz0!_~uZ)$lEP!j=4J!YyROL5L- z@S(jr$G|O)=8bj9BnLq9B14>5k5N6JuB@ycXxqA{s>_5qX7)kAdp#c}d4M_f zqLS5fD!&Jcg62~2pKTr8kNSsY_m_LZih~iq?Jn>!@NR1;8RBb8?2Bs_Gi{62Ahqv_ zH3@B09i&=qnsJQLzmF@Jw&>#Vmh2oT?~p0&jHe}N@qN(eT=0EsQh%G5 zt=HYy=?o->nCRcv%0OnN=&;+$r0jJS=-SK7D(01df|Hnmo~@CvRaNhf?LzXD{hw@b`(IpH^>;YSK><^4Kehl98JugAQ0nG|C@Dwwb~DI>G?(vmn?i3h;0&t@I))3s z>hI&mID90xxNFZMa^Hd+!^glOG+VdA$8T<2g*xkY!V9xj?^L{!RN0F6>_jXVLR;m7 z$K>U+#k>t?de9QJ7G3dmdHM%#US72YVOzF!qKacwwyTzV-B*)pmsdBc#kM}TMrc3I zTsta-a?`r-@ul=ASjiP;%6F~L)sygiZZ4()wa6Yh8pF$x+Q%;$9GyYua-ojNcR;Xf zBIYjCsvyoG`|+?MYEY+<|nnTIjO066^BrQU`MfM zTa{UD-Y`TY;QL%VmXva@fpruN2bPE69kTOvbEyIM1Du5 zjfz?bc&)}i0l|RCjQPNiK)m!2T!M?G7VOR2=YZCbcvIpDdIg++SBBMhC=-v^RUS3- z?jB7!L1y7)%W2pd?w!d*SBZ_LJirXgP%KQZ1rt#pH6=sPfK>2e=cK^Z8@65TZtG`> zQ(h~nR$U5MlE=Whro0cgn*m<2*5TJ6Jm)(!l;ns$gQ4L}J{x7#g<6ZGbbMb6&I;X~ zSmBqRgXR#^k{jII2iXg(AA9vxx32V0(1WBU`*2OU$qg7){posv16^u9YsEeLDfX&p zP7>LvYLMH_?Z0{R#y&}zr&iV~(b_QWwA~Fa&1*TyWaiEJ1T|mSgRV)Hb0p%w^t^-+ z3*_L==SiLCb21uXkcc@RVNz?G`8)}!2ldbM1~6SVpq-<+PP{meWJBm+yifA1Fs;VI zLT3EY1B)tLRxz=u_;(aPy$!on#IVho68h&`LLBsNTy~Q(`+iZ4`bE@}sK-2PUAX0+ zog3kH8q{)IbB#XFp2!FPARS#Xp3gfT=p{ny7B*J$vXiin)x+W<7>9ZZT`qmAo0ZB9 z@^6$Eqc})};)Pxk+bC6F^5!|s31Cwd+Z0ffUm&dX%~Tm}CmzXm`lKCYKyy;Ki3CeY zH`(OpGc7uD1_t3jnr~fPUZy8*ss~UWRCn^xE&Th1KBE#I@S++5jwl$A5zMOh+PBPF zBV|wgp_;_^O@UC(Yi19%f}z!F0USFiQBtV!Xp`uyF+;DVcS1d~perUiI@^?3V8kwp zdzco(7`W0t+kRNgbL~b8kID>1JnQ4*_inxU&j7m!rj_diPoVh>3@_gM6=W;F;E3{@ zo}-}O@mqSQ6LWJa(OcUO(DTYD?k=?d{1S=xCL(t?8mlZCt6T)R)7rWxHvaC?i_Z@G zFp+#tXY@HDl*8I_Xkm3b;G}Uy$u`Xu7QT!cn~R9Ou5zKv`uMrKEo1`#+XqgHjav8o zQ&F=4j&%6jopVg}tHnGqw<>7E@pfC2MIpg((%JpgXS2kebF6Hs*GxNJJ9UkY*MrW+ z_gMi{Fyx*x8{1kB2+$(3tz0<{4Hr+4)dH1cku*ti^VVW>0D!G9Z6(OM;kUS*8Vyr3 
z)q8)>R16>>KHC!K^=r1GtPVA(#+Yh2V!6{>yed3_TQ8HJ&EVYMA}&Bz9;t?a?;>0! z*=-^i4R!T~R9~k=e|uGbyfX0ztRYQEY2A4-5X62hk*sl0fk`_Ml`O&>fwT6lv6&d zD|W@T*Dd`(U{@U;uw8e?a=r>_sY8t5->mBK*IR3Nm2JEe#pF33bXDrM+zk9A{131A zmy?t(Al|Se1X}F~qyU-d1=t6Iz9fFm#k^)(F^@uFpCfxUc@yvQjI6xFc57cX`8Siz zbp(4D7*hJdVMlPpRgrBQ&M%;ytI=zJ8Q)ztkNYL90S1uTqYD)Zn#*Jr>dlDnx8e8pMp7^v;yif2XTcYQiFF=>PM89Z5^lj-c8A9fZ>$moN zyMlvp8h-aYd%*pl_&dmwYDWwU1X-8WT-D&$-X0pE0Z-|Q z#O_xrBXC|m1CYB{-A*>aydXYkOC=K+rtMI-$Nd?(;92jn`GyXOAlm{&?6Jt6Y9yW2 zdx~)Fl2Nv{M!-aV_Ca?gE2_w-QFN7X%i^&#>*;&65o8;V6UD*7q3_hK;>t_7^c+TR z*3E~K(L2zXTCzz+-Tkv)?(c_KSr<5m%_=+^=&r%rQxVX?kzk!s-=Z?KXw53l$=Nof zhudLKDP5EAXY0B-_Q{y0EDoQl}-QS5SsFCP<A_qexEuj1yqAv)E&;69Bs3v zfR|aX%6CkSY5I6y6x*I5XCZ6LM)Hph{P(T*i{hXz1}gJ54#&>JIJ1`FjqJzU4%43p z&Om4eburim)B5j({Z64`q+%zLVU+ z&DgbcDN#^S5j3oW-i;~GGoq8Y9t8C<7@Vh7EHL9~I)3T~#=;XBoEio&MXTOcGAYJH z5jRWo%BEbn2w<|`6clwu0WkWmXe{eMd`!$!z*X!R<_E@qU%!F$!hbfi_#H$?3levO zIrR4cJ{Buim0tAKoIL=}^(TlSN#OQ`YeaW-4BfSLHIRwM(Mx-8InivR{rHAv?(<9rj-cZVyxvU_ktVe>wkUBv zT0^S?A7FQJSiV5ifXi|wMqXiv$_o>Ft)%ebHp8|v<-+cl-z!@wc`z@PQ2hFi%g?X< zq{7qWHI4pEe?~07Fh}r0NixjxDeahENgyi_+yZz=5#f=-w{(i3nFXU&c-d?%%rW>O zJ?+tKX2rbZekun(jgNy7%O@-8QIhsh)0tbvpRf7$2#$>}u*rwt;kz(5T4G15;5>V< zKGLYnOJgqhLns}g`z!M=^ z)wd0bMn()s7EmyXHmMi+@tlhOA_^V>;J>Oqg!1xzMYQJC%^L~o=pjt@{49DtKv#o> zZj=0NrBXJY5FAqN?yUDCc4I+6}EGI5|nz?ohl=nM58Y259%HsEM zR5p>vj%@tF-n8R^$n5|01nivB7cv&#>sJngQlTAuO#LK`hPT+g1SDRP+*v-<2@;?eiN%6M2o--*HAcQGQ3AI0}$sP9SQZ|hDK8#D3%>1myN ziPT;@TPw(?g+b7^{E97-Xhebqz7M>py#%PZ3A_5eFwXt_`Lj5vxB2^mwL-SKAj%(= z)Pk%aVsW*`!rW!{v&Et813P?c$jkU7seq;|0KiLW%4lMj{n4oTw~H(_#$%V>?ucmY z;;HYB6-UjcN%*WcvIlM}iZZEDpB}Q8Zk>Z{_<-m0JdPUmN!3^+gRm;%ZjBUbH|pw| zLFp0(W;N-)FGvXDxaqeIhaeYes9OuDYzIvOPVhA#ACi`k=y$G?E@R2)$@ipr=#2XE z<#9XE*HD~@Fv3J0gEU^JpKRnCQ@bUJ9)fsy)D5uVzFfcZN^}RnNCfJ6|A)G zZ)*OBiGg2JnFdbCelRex{r177-CWgF{cS;;L3V~myti)c3mo$lg*8+*aPKj^8hc6? z#DdY1vA<(f!oX*)unMZ1Z}7fzRH;f!DeJR5$s_2qu$19??8hKvPxLTNd?J3x!@zw+ zdlo}pU+>1H zaBvAPRbt!K`e^ttsq>0#Nf3YZK}@@;DL~pdZEM4a$S?rzhiEp4kY^NNIT}t8b}}mc za2n5R%q$9u-JmwBLFg4zGdH?Ftf}@wLsOG&31HT%-iO96->yXOH-%&ie9}=BrN0kh za@T4sM+w&H^ra&gIX{5@Y(jePOTGWofWFdL`wjvUC-+JI@<)z4pvteXezL!}_i(J( za8p^nC4tjjUF#}c3rpOY$cIPm1K+%I_L~*fnb<0D!WZtD68F}}tVqQmq}B)Zo!lR{ z0J9>~(V+NMvyUE@&9&-FvmE}YUN;(ei=XbA!%(@al#cv?SWJ0)cjDbv%zRLhk9Db$ z{7xta2CZ;gQOydw*hS=zNHmLlJ-i?+-Aicx!%Sz(X}ieo;=$Tb+W~y6FPvFIv=Bj) zqJMZcR{nd|_tz2gzn9}bMd$y&y|X1j{=eBz{66UpQ{h`*1|aZs^>bP0l+XkK<(> directory, which -defaults to `./data`. - -All settings associated with monitoring in {es} must be set in either the -`elasticsearch.yml` file for each node or, where possible, in the dynamic -cluster settings. For more information, see <>. - -[[es-monitoring-overview]] -{es} is also at the core of monitoring across the {stack}. In all cases, -monitoring documents are just ordinary JSON documents built by monitoring each -{stack} component at some collection interval, then indexing those -documents into the monitoring cluster. - -Each component in the stack is responsible for monitoring itself and then -forwarding those documents to the {es} production cluster for both routing and -indexing (storage). The routing and indexing processes in {es} are handled by -what are called <> and -<>. - -Alternatively, in 6.4 and later, you can use {metricbeat} to collect -monitoring data about {kib} and ship it directly to the monitoring cluster, -rather than routing it through the production cluster. In 6.5 and later, you -can also use {metricbeat} to collect and ship data about {es}. 
- -You can view monitoring data from {kib} where it’s easy to spot issues at a -glance or delve into the system behavior over time to diagnose operational -issues. In addition to the built-in status warnings, you can also set up custom -alerts based on the data in the monitoring indices. - -For an introduction to monitoring your {stack}, including Beats, {ls}, and {kib}, -see {stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> -- -include::collectors.asciidoc[] -include::exporters.asciidoc[] +include::overview.asciidoc[] + +include::how-monitoring-works.asciidoc[] + +include::collecting-monitoring-data.asciidoc[] include::pause-export.asciidoc[] +include::configuring-metricbeat.asciidoc[] + +include::configuring-filebeat.asciidoc[] + +include::indices.asciidoc[] + +include::collectors.asciidoc[] + +include::exporters.asciidoc[] +include::local-export.asciidoc[] +include::http-export.asciidoc[] diff --git a/docs/reference/monitoring/indices.asciidoc b/docs/reference/monitoring/indices.asciidoc index 6586a945b5d..c6432ea2e7f 100644 --- a/docs/reference/monitoring/indices.asciidoc +++ b/docs/reference/monitoring/indices.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[config-monitoring-indices]] -=== Configuring indices for monitoring +== Configuring indices for monitoring <> are used to configure the indices that store the monitoring data collected from a cluster. diff --git a/docs/reference/monitoring/local-export.asciidoc b/docs/reference/monitoring/local-export.asciidoc index 821a6b1fc0e..8723b226ca7 100644 --- a/docs/reference/monitoring/local-export.asciidoc +++ b/docs/reference/monitoring/local-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[local-exporter]] -=== Local Exporters +=== Local exporters The `local` exporter is the default exporter in {monitoring}. It routes data back into the same (local) cluster. In other words, it uses the production @@ -56,7 +56,7 @@ For more information about the configuration options for the `local` exporter, see <>. [[local-exporter-cleaner]] -==== Cleaner Service +==== Cleaner service One feature of the `local` exporter, which is not present in the `http` exporter, is a cleaner service. The cleaner service runs once per day at 01:00 AM UTC on diff --git a/docs/reference/monitoring/overview.asciidoc b/docs/reference/monitoring/overview.asciidoc new file mode 100644 index 00000000000..e4f58e4060c --- /dev/null +++ b/docs/reference/monitoring/overview.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[[monitoring-overview]] +== Monitoring overview +++++ +Overview +++++ + +When you monitor a cluster, you collect data from the {es} nodes, {ls} nodes, +{kib} instances, and Beats in your cluster. You can also +<>. + +All of the monitoring metrics are stored in {es}, which enables you to easily +visualize the data from {kib}. By default, the monitoring metrics are stored in +local indices. + +TIP: In production, we strongly recommend using a separate monitoring cluster. +Using a separate monitoring cluster prevents production cluster outages from +impacting your ability to access your monitoring data. It also prevents +monitoring activities from impacting the performance of your production cluster. +For the same reason, we also recommend using a separate {kib} instance for +viewing the monitoring data. + +You can use {metricbeat} to collect and ship data about {es}, {kib}, {ls}, and +Beats directly to your monitoring cluster rather than routing it through your +production cluster. 
The following diagram illustrates a typical monitoring +architecture with separate production and monitoring clusters: + +image::images/architecture.png[A typical monitoring environment] + +If you have the appropriate license, you can route data from multiple production +clusters to a single monitoring cluster. For more information about the +differences between various subscription levels, see: +https://www.elastic.co/subscriptions + +IMPORTANT: In general, the monitoring cluster and the clusters being monitored +should be running the same version of the stack. A monitoring cluster cannot +monitor production clusters running newer versions of the stack. If necessary, +the monitoring cluster can monitor production clusters running the latest +release of the previous major version. diff --git a/docs/reference/monitoring/pause-export.asciidoc b/docs/reference/monitoring/pause-export.asciidoc index 7a8bc664ffc..6cf02a1f240 100644 --- a/docs/reference/monitoring/pause-export.asciidoc +++ b/docs/reference/monitoring/pause-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[pause-export]] -== Pausing Data Collection +=== Pausing data collection To stop generating {monitoring} data in {es}, disable data collection: diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 8b459a8f098..0824f5068fc 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -885,3 +885,14 @@ See <>. ==== Pivot objects See <>. + +[role="exclude",id="configuring-monitoring"] +=== Configuring monitoring + +See <>. + +[role="exclude",id="es-monitoring"] +=== Monitoring {es} + +See <>. + From 41ee8aa6fcdf4713f31b98b866287b28a4caafa7 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 24 Sep 2019 11:40:21 -0700 Subject: [PATCH 31/94] Reject regexp queries on the _index field. (#46945) We speculatively added support for `regexp` queries on the `_index` field in #34089 (this functionality was not actually requested by a user). Supporting regex logic adds complexity to the `_index` field for not much gain, so we would like to remove it. From an end-to-end test it turns out this functionality never even worked in the first place because of an error in how regex flags were interpreted! For this reason, we can remove support for `regexp` on `_index` without a deprecation period. Relates to #46640. 
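
For illustration, a search of roughly this shape (hypothetical index name and
pattern, not taken from the test suite) is now rejected outright with a
`QueryShardException` ("Can only use regexp queries on keyword and text
fields"), instead of the pattern being compiled and matched against the index
name:

    GET /my_index/_search
    {
      "query": {
        "regexp": {
          "_index": "my_ind.x"
        }
      }
    }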
--- .../index/mapper/IndexFieldMapper.java | 16 -------------- .../index/mapper/IndexFieldTypeTests.java | 22 +++++++++++-------- 2 files changed, 13 insertions(+), 25 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 4e690640135..c3693f4ded9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -38,7 +37,6 @@ import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.regex.Pattern; public class IndexFieldMapper extends MetadataFieldMapper { @@ -175,20 +173,6 @@ public class IndexFieldMapper extends MetadataFieldMapper { } } - @Override - public Query regexpQuery(String value, int flags, int maxDeterminizedStates, - MultiTermQuery.RewriteMethod method, QueryShardContext context) { - String indexName = context.getFullyQualifiedIndex().getName(); - Pattern pattern = Regex.compile(value, Regex.flagsToString(flags)); - - if (pattern.matcher(indexName).matches()) { - return Queries.newMatchAllQuery(); - } else { - return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() - + "] doesn't match the provided pattern [" + value + "]."); - } - } - @Override public Query wildcardQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index 11b365ff16e..1b100fb0872 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -27,9 +27,12 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import java.util.function.Predicate; +import static org.hamcrest.Matchers.containsString; + public class IndexFieldTypeTests extends FieldTypeTestCase { @Override @@ -46,15 +49,6 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("other_ind", null, createContext())); } - public void testRegexpQuery() { - MappedFieldType ft = createDefaultFieldType(); - ft.setName("field"); - ft.setIndexOptions(IndexOptions.DOCS); - - assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("ind?x", 0, 10, null, createContext())); - } - public void testWildcardQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); @@ -64,6 +58,16 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("other_ind*x", null, createContext())); } + public 
void testRegexpQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + + QueryShardException e = expectThrows(QueryShardException.class, () -> + assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext()))); + assertThat(e.getMessage(), containsString("Can only use regexp queries on keyword and text fields")); + } + private QueryShardContext createContext() { IndexMetaData indexMetaData = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) From 4faba9cbbfa67d1b15dd8e7f93be6d255b4b801f Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 24 Sep 2019 12:38:23 -0700 Subject: [PATCH 32/94] Mention ip fields in the global ordinals docs. (#47045) Although they do not support eager_global_ordinals, ip fields use global ordinals for certain aggregations like 'terms'. This commit also corrects a reference to the sampler aggregation. --- .../reference/mapping/params/eager-global-ordinals.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/mapping/params/eager-global-ordinals.asciidoc b/docs/reference/mapping/params/eager-global-ordinals.asciidoc index 49c5e5fce28..a7b6d748bd1 100644 --- a/docs/reference/mapping/params/eager-global-ordinals.asciidoc +++ b/docs/reference/mapping/params/eager-global-ordinals.asciidoc @@ -26,9 +26,9 @@ ordinal for each segment. Global ordinals are used if a search contains any of the following components: -* Bucket aggregations on `keyword` and `flattened` fields. This includes -`terms` aggregations as mentioned above, as well as `composite`, `sampler`, -and `significant_terms`. +* Certain bucket aggregations on `keyword`, `ip`, and `flattened` fields. This +includes `terms` aggregations as mentioned above, as well as `composite`, +`diversified_sampler`, and `significant_terms`. * Bucket aggregations on `text` fields that require <> to be enabled. * Operations on parent and child documents from a `join` field, including From 61eef2fd3139791e61889c9aee18493aeec873ed Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 25 Sep 2019 02:01:37 -0400 Subject: [PATCH 33/94] [DOCS] Add multi-level nested query example to nested query docs (#46986) --- .../reference/query-dsl/nested-query.asciidoc | 167 +++++++++++++++++- 1 file changed, 163 insertions(+), 4 deletions(-) diff --git a/docs/reference/query-dsl/nested-query.asciidoc b/docs/reference/query-dsl/nested-query.asciidoc index 619fe8c3dda..ff4256bdb2a 100644 --- a/docs/reference/query-dsl/nested-query.asciidoc +++ b/docs/reference/query-dsl/nested-query.asciidoc @@ -23,7 +23,7 @@ mapping. For example: ---- PUT /my_index { - "mappings": { + "mappings" : { "properties" : { "obj1" : { "type" : "nested" @@ -33,7 +33,6 @@ PUT /my_index } ---- -// TESTSETUP [[nested-query-ex-query]] ===== Example query @@ -42,7 +41,7 @@ PUT /my_index ---- GET /my_index/_search { - "query": { + "query": { "nested" : { "path" : "obj1", "query" : { @@ -58,6 +57,7 @@ GET /my_index/_search } } ---- +// TEST[continued] [[nested-top-level-params]] ==== Top-level parameters for `nested` @@ -78,6 +78,8 @@ such as `obj1.name`. Multi-level nesting is automatically supported, and detected, resulting in an inner nested query to automatically match the relevant nesting level, rather than root, if it exists within another nested query. + +See <> for an example. 
-- `score_mode`:: @@ -114,4 +116,161 @@ If `false`, {es} returns an error if the `path` is an unmapped field. You can use this parameter to query multiple indices that may not contain the field `path`. --- \ No newline at end of file +-- + +[[nested-query-notes]] +==== Notes + +[[multi-level-nested-query-ex]] +===== Multi-level nested queries + +To see how multi-level nested queries work, +first you need an index that has nested fields. +The following request defines mappings for the `drivers` index +with nested `make` and `model` fields. + +[source,console] +---- +PUT /drivers +{ + "mappings" : { + "properties" : { + "driver" : { + "type" : "nested", + "properties" : { + "last_name" : { + "type" : "text" + }, + "vehicle" : { + "type" : "nested", + "properties" : { + "make" : { + "type" : "text" + }, + "model" : { + "type" : "text" + } + } + } + } + } + } + } +} +---- + +Next, index some documents to the `drivers` index. + +[source,console] +---- +PUT /drivers/_doc/1 +{ + "driver" : { + "last_name" : "McQueen", + "vehicle" : [ + { + "make" : "Powell Motors", + "model" : "Canyonero" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } +} + +PUT /drivers/_doc/2?refresh +{ + "driver" : { + "last_name" : "Hudson", + "vehicle" : [ + { + "make" : "Mifune", + "model" : "Mach Five" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } +} +---- +// TEST[continued] + +You can now use a multi-level nested query +to match documents based on the `make` and `model` fields. + +[source,console] +---- +GET /drivers/_search +{ + "query" : { + "nested" : { + "path" : "driver", + "query" : { + "nested" : { + "path" : "driver.vehicle", + "query" : { + "bool" : { + "must" : [ + { "match" : { "driver.vehicle.make" : "Powell Motors" } }, + { "match" : { "driver.vehicle.model" : "Canyonero" } } + ] + } + } + } + } + } + } +} +---- +// TEST[continued] + +The search request returns the following response: + +[source,console-result] +---- +{ + "took" : 5, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 1, + "relation" : "eq" + }, + "max_score" : 3.7349272, + "hits" : [ + { + "_index" : "drivers", + "_type" : "_doc", + "_id" : "1", + "_score" : 3.7349272, + "_source" : { + "driver" : { + "last_name" : "McQueen", + "vehicle" : [ + { + "make" : "Powell Motors", + "model" : "Canyonero" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took" : 5/"took": $body.took/] From e974f178b57833c8cf6b02f440abd7a080859783 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 25 Sep 2019 07:38:17 +0200 Subject: [PATCH 34/94] [Transform] rename data frame transform to transform for hlrc client (#46933) rename data frame transform to transform for hlrc --- .../client/RestHighLevelClient.java | 6 +- ...aFrameClient.java => TransformClient.java} | 164 ++++++------- ...s.java => TransformRequestConverters.java} | 42 ++-- ...quest.java => DeleteTransformRequest.java} | 10 +- ...mRequest.java => GetTransformRequest.java} | 16 +- ...esponse.java => GetTransformResponse.java} | 28 +-- ...est.java => GetTransformStatsRequest.java} | 8 +- ...se.java => GetTransformStatsResponse.java} | 24 +- ...uest.java => PreviewTransformRequest.java} | 16 +- ...nse.java => PreviewTransformResponse.java} | 10 +- ...mRequest.java => PutTransformRequest.java} | 20 +- ...equest.java => StartTransformRequest.java} | 10 +- ...ponse.java => 
StartTransformResponse.java} | 12 +- ...Request.java => StopTransformRequest.java} | 10 +- ...sponse.java => StopTransformResponse.java} | 12 +- ...va => TransformNamedXContentProvider.java} | 2 +- ...quest.java => UpdateTransformRequest.java} | 16 +- ...onse.java => UpdateTransformResponse.java} | 16 +- .../transform/transforms/DestConfig.java | 4 +- .../transform/transforms/QueryConfig.java | 2 +- .../transform/transforms/SourceConfig.java | 4 +- ...ats.java => TransformCheckpointStats.java} | 36 +-- ...o.java => TransformCheckpointingInfo.java} | 36 +-- ...nsformConfig.java => TransformConfig.java} | 46 ++-- ...Update.java => TransformConfigUpdate.java} | 50 ++-- ...ion.java => TransformIndexerPosition.java} | 14 +- ...mStats.java => TransformIndexerStats.java} | 20 +- ...rmProgress.java => TransformProgress.java} | 22 +- ...ransformStats.java => TransformStats.java} | 28 +-- .../pivot/DateHistogramGroupSource.java | 2 +- ...icsearch.plugins.spi.NamedXContentProvider | 2 +- .../DataFrameRequestConvertersTests.java | 102 ++++---- .../client/RestHighLevelClientTests.java | 2 +- ...FrameTransformIT.java => TransformIT.java} | 216 ++++++++-------- ...nIT.java => TransformDocumentationIT.java} | 230 +++++++++--------- ....java => DeleteTransformRequestTests.java} | 8 +- ...sts.java => GetTransformRequestTests.java} | 8 +- ...ts.java => GetTransformResponseTests.java} | 28 +-- ...ava => GetTransformStatsRequestTests.java} | 8 +- ...va => GetTransformStatsResponseTests.java} | 34 +-- ...java => PreviewTransformRequestTests.java} | 32 +-- ...ava => PreviewTransformResponseTests.java} | 10 +- ...sts.java => PutTransformRequestTests.java} | 30 +-- .../StartDataFrameTransformRequestTests.java | 6 +- ...ts.java => StopTransformRequestTests.java} | 8 +- ...UpdateDataFrameTransformResponseTests.java | 12 +- ....java => UpdateTransformRequestTests.java} | 26 +- ...ava => TransformCheckpointStatsTests.java} | 32 +-- ...a => TransformCheckpointingInfoTests.java} | 30 +-- ...igTests.java => TransformConfigTests.java} | 18 +- ...s.java => TransformConfigUpdateTests.java} | 18 +- ...ava => TransformIndexerPositionTests.java} | 14 +- ...s.java => TransformIndexerStatsTests.java} | 20 +- ...Tests.java => TransformProgressTests.java} | 24 +- ...atsTests.java => TransformStatsTests.java} | 36 +-- ...ava => TransformCheckpointStatsTests.java} | 24 +- ...a => TransformCheckpointingInfoTests.java} | 24 +- ...ava => TransformIndexerPositionTests.java} | 16 +- ...s.java => TransformIndexerStatsTests.java} | 12 +- ...Tests.java => TransformProgressTests.java} | 16 +- ...atsTests.java => TransformStatsTests.java} | 40 +-- .../high-level/supported-apis.asciidoc | 2 +- .../transform/delete_transform.asciidoc | 2 +- .../transform/get_transform.asciidoc | 6 +- .../transform/get_transform_stats.asciidoc | 6 +- .../transform/preview_transform.asciidoc | 4 +- .../transform/put_transform.asciidoc | 4 +- .../transform/start_transform.asciidoc | 6 +- .../transform/stop_transform.asciidoc | 8 +- .../transform/update_transform.asciidoc | 6 +- ...m.json => transform.delete_transform.json} | 2 +- ...form.json => transform.get_transform.json} | 4 +- ...son => transform.get_transform_stats.json} | 4 +- ....json => transform.preview_transform.json} | 4 +- ...form.json => transform.put_transform.json} | 6 +- ...rm.json => transform.start_transform.json} | 2 +- ...orm.json => transform.stop_transform.json} | 4 +- ...m.json => transform.update_transform.json} | 6 +- .../preview_transforms.yml | 20 +- .../transforms_crud.yml | 90 +++---- 
.../transforms_start_stop.yml | 90 +++---- .../transforms_stats.yml | 50 ++-- .../transforms_update.yml | 38 +-- .../transform/integration/TransformIT.java | 24 +- .../integration/TransformIntegTestCase.java | 92 +++---- .../integration/TransformInternalIndexIT.java | 18 +- .../upgrades/DataFrameSurvivesUpgradeIT.java | 28 +-- .../mixed_cluster/80_data_frame_jobs_crud.yml | 82 +++---- .../old_cluster/80_data_frame_jobs_crud.yml | 50 ++-- .../80_data_frame_jobs_crud.yml | 86 +++---- 90 files changed, 1258 insertions(+), 1258 deletions(-) rename client/rest-high-level/src/main/java/org/elasticsearch/client/{DataFrameClient.java => TransformClient.java} (65%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/{DataFrameRequestConverters.java => TransformRequestConverters.java} (79%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{DeleteDataFrameTransformRequest.java => DeleteTransformRequest.java} (85%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{GetDataFrameTransformRequest.java => GetTransformRequest.java} (80%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{GetDataFrameTransformResponse.java => GetTransformResponse.java} (77%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{GetDataFrameTransformStatsRequest.java => GetTransformStatsRequest.java} (88%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{GetDataFrameTransformStatsResponse.java => GetTransformStatsResponse.java} (78%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{PreviewDataFrameTransformRequest.java => PreviewTransformRequest.java} (80%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{PreviewDataFrameTransformResponse.java => PreviewTransformResponse.java} (80%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{PutDataFrameTransformRequest.java => PutTransformRequest.java} (79%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{StartDataFrameTransformRequest.java => StartTransformRequest.java} (84%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{StartDataFrameTransformResponse.java => StartTransformResponse.java} (68%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{StopDataFrameTransformRequest.java => StopTransformRequest.java} (87%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{StopDataFrameTransformResponse.java => StopTransformResponse.java} (67%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{DataFrameNamedXContentProvider.java => TransformNamedXContentProvider.java} (95%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{UpdateDataFrameTransformRequest.java => UpdateTransformRequest.java} (82%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/{UpdateDataFrameTransformResponse.java => UpdateTransformResponse.java} (69%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformCheckpointStats.java => TransformCheckpointStats.java} (68%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformCheckpointingInfo.java => TransformCheckpointingInfo.java} (71%) rename 
client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformConfig.java => TransformConfig.java} (85%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformConfigUpdate.java => TransformConfigUpdate.java} (72%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameIndexerPosition.java => TransformIndexerPosition.java} (89%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameIndexerTransformStats.java => TransformIndexerStats.java} (84%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformProgress.java => TransformProgress.java} (82%) rename client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformStats.java => TransformStats.java} (76%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/{DataFrameTransformIT.java => TransformIT.java} (59%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/{DataFrameTransformDocumentationIT.java => TransformDocumentationIT.java} (74%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{DeleteDataFrameTransformRequestTests.java => DeleteTransformRequestTests.java} (74%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{GetDataFrameTransformRequestTests.java => GetTransformRequestTests.java} (74%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{GetDataFrameTransformResponseTests.java => GetTransformResponseTests.java} (71%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{GetDataFrameTransformStatsRequestTests.java => GetTransformStatsRequestTests.java} (74%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{GetDataFrameTransformStatsResponseTests.java => GetTransformStatsResponseTests.java} (70%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{PreviewDataFrameTransformRequestTests.java => PreviewTransformRequestTests.java} (58%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{PreviewDataFrameTransformResponseTests.java => PreviewTransformResponseTests.java} (86%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{PutDataFrameTransformRequestTests.java => PutTransformRequestTests.java} (60%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{StopDataFrameTransformRequestTests.java => StopTransformRequestTests.java} (82%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/{UpdateDataFrameTransformRequestTests.java => UpdateTransformRequestTests.java} (63%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformCheckpointStatsTests.java => TransformCheckpointStatsTests.java} (51%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformCheckpointingInfoTests.java => TransformCheckpointingInfoTests.java} (54%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformConfigTests.java => TransformConfigTests.java} (81%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformConfigUpdateTests.java => 
TransformConfigUpdateTests.java} (78%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameIndexerPositionTests.java => TransformIndexerPositionTests.java} (85%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameIndexerTransformStatsTests.java => TransformIndexerStatsTests.java} (76%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformProgressTests.java => TransformProgressTests.java} (62%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/{DataFrameTransformStatsTests.java => TransformStatsTests.java} (57%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameTransformCheckpointStatsTests.java => TransformCheckpointStatsTests.java} (75%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameTransformCheckpointingInfoTests.java => TransformCheckpointingInfoTests.java} (65%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameIndexerPositionTests.java => TransformIndexerPositionTests.java} (81%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameIndexerTransformStatsTests.java => TransformIndexerStatsTests.java} (82%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameTransformProgressTests.java => TransformProgressTests.java} (79%) rename client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/{DataFrameTransformStatsTests.java => TransformStatsTests.java} (75%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.delete_data_frame_transform.json => transform.delete_transform.json} (94%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.get_data_frame_transform.json => transform.get_transform.json} (87%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.get_data_frame_transform_stats.json => transform.get_transform_stats.json} (86%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.preview_data_frame_transform.json => transform.preview_transform.json} (75%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.put_data_frame_transform.json => transform.put_transform.json} (82%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.start_data_frame_transform.json => transform.start_transform.json} (93%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.stop_data_frame_transform.json => transform.stop_transform.json} (86%) rename x-pack/plugin/src/test/resources/rest-api-spec/api/{data_frame.update_data_frame_transform.json => transform.update_transform.json} (82%) rename x-pack/plugin/src/test/resources/rest-api-spec/test/{data_frame => transform}/preview_transforms.yml (94%) rename x-pack/plugin/src/test/resources/rest-api-spec/test/{data_frame => transform}/transforms_crud.yml (90%) rename x-pack/plugin/src/test/resources/rest-api-spec/test/{data_frame => transform}/transforms_start_stop.yml (81%) rename x-pack/plugin/src/test/resources/rest-api-spec/test/{data_frame => transform}/transforms_stats.yml (87%) rename x-pack/plugin/src/test/resources/rest-api-spec/test/{data_frame => transform}/transforms_update.yml (91%) diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 65d517231f2..171dfb174dc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -256,7 +256,7 @@ public class RestHighLevelClient implements Closeable { private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this); private final RollupClient rollupClient = new RollupClient(this); private final CcrClient ccrClient = new CcrClient(this); - private final DataFrameClient dataFrameClient = new DataFrameClient(this); + private final TransformClient transformClient = new TransformClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -477,8 +477,8 @@ public class RestHighLevelClient implements Closeable { * * @return the client wrapper for making Data Frame API calls */ - public DataFrameClient dataFrame() { - return dataFrameClient; + public TransformClient transform() { + return transformClient; } /** diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java similarity index 65% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java index e56bd442292..9b18fbc06af 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java @@ -21,29 +21,29 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.core.AcknowledgedResponse; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PreviewTransformResponse; +import 
org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; import java.io.IOException; import java.util.Collections; -public final class DataFrameClient { +public final class TransformClient { private final RestHighLevelClient restHighLevelClient; - DataFrameClient(RestHighLevelClient restHighLevelClient) { + TransformClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; } @@ -54,15 +54,15 @@ public final class DataFrameClient { * see * Create transform documentation * - * @param request The PutDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}. + * @param request The PutTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfig}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return An AcknowledgedResponse object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse putDataFrameTransform(PutDataFrameTransformRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse putTransform(PutTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::putDataFrameTransform, + TransformRequestConverters::putTransform, options, AcknowledgedResponse::fromXContent, Collections.emptySet()); @@ -74,16 +74,16 @@ public final class DataFrameClient { * For additional info * see * Create transform documentation - * @param request The PutDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}. + * @param request The PutTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfig}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable putDataFrameTransformAsync(PutDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable putTransformAsync(PutTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::putDataFrameTransform, + TransformRequestConverters::putTransform, options, AcknowledgedResponse::fromXContent, listener, @@ -97,18 +97,18 @@ public final class DataFrameClient { * see * Create transform documentation * - * @param request The UpdateDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}. + * @param request The UpdateTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}. 
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return An UpdateDataFrameTransformResponse object containing the updated configuration + * @return An UpdateTransformResponse object containing the updated configuration * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public UpdateDataFrameTransformResponse updateDataFrameTransform(UpdateDataFrameTransformRequest request, - RequestOptions options) throws IOException { + public UpdateTransformResponse updateTransform(UpdateTransformRequest request, + RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::updateDataFrameTransform, + TransformRequestConverters::updateTransform, options, - UpdateDataFrameTransformResponse::fromXContent, + UpdateTransformResponse::fromXContent, Collections.emptySet()); } @@ -118,19 +118,19 @@ public final class DataFrameClient { * For additional info * see * Create transform documentation - * @param request The UpdateDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}. + * @param request The UpdateTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable updateDataFrameTransformAsync(UpdateDataFrameTransformRequest request, - RequestOptions options, - ActionListener listener) { + public Cancellable updateTransformAsync(UpdateTransformRequest request, + RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::updateDataFrameTransform, + TransformRequestConverters::updateTransform, options, - UpdateDataFrameTransformResponse::fromXContent, + UpdateTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -142,17 +142,17 @@ public final class DataFrameClient { * see * Get transform stats documentation * - * @param request Specifies the which transforms to get the stats for + * @param request Specifies which transforms to get the stats for * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return The transform stats * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public GetDataFrameTransformStatsResponse getDataFrameTransformStats(GetDataFrameTransformStatsRequest request, RequestOptions options) + public GetTransformStatsResponse getTransformStats(GetTransformStatsRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransformStats, + TransformRequestConverters::getTransformStats, options, - GetDataFrameTransformStatsResponse::fromXContent, + GetTransformStatsResponse::fromXContent, Collections.emptySet()); } @@ -162,17 +162,17 @@ public final class DataFrameClient { * For additional info * see * Get transform stats documentation - * @param request Specifies the which transforms to get the stats for + * @param request Specifies which transforms to get the stats for * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable getDataFrameTransformStatsAsync(GetDataFrameTransformStatsRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable getTransformStatsAsync(GetTransformStatsRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransformStats, + TransformRequestConverters::getTransformStats, options, - GetDataFrameTransformStatsResponse::fromXContent, + GetTransformStatsResponse::fromXContent, listener, Collections.emptySet()); } @@ -189,10 +189,10 @@ public final class DataFrameClient { * @return An AcknowledgedResponse object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse deleteDataFrameTransform(DeleteDataFrameTransformRequest request, RequestOptions options) + public AcknowledgedResponse deleteTransform(DeleteTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::deleteDataFrameTransform, + TransformRequestConverters::deleteTransform, options, AcknowledgedResponse::fromXContent, Collections.emptySet()); @@ -209,10 +209,10 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable deleteDataFrameTransformAsync(DeleteDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable deleteTransformAsync(DeleteTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::deleteDataFrameTransform, + TransformRequestConverters::deleteTransform, options, AcknowledgedResponse::fromXContent, listener, @@ -231,12 +231,12 @@ public final class DataFrameClient { * @return A response containing the results of the applied transform * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public 
PreviewDataFrameTransformResponse previewDataFrameTransform(PreviewDataFrameTransformRequest request, RequestOptions options) + public PreviewTransformResponse previewTransform(PreviewTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::previewDataFrameTransform, + TransformRequestConverters::previewTransform, options, - PreviewDataFrameTransformResponse::fromXContent, + PreviewTransformResponse::fromXContent, Collections.emptySet()); } @@ -250,12 +250,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable previewDataFrameTransformAsync(PreviewDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable previewTransformAsync(PreviewTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::previewDataFrameTransform, + TransformRequestConverters::previewTransform, options, - PreviewDataFrameTransformResponse::fromXContent, + PreviewTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -272,12 +272,12 @@ public final class DataFrameClient { * @return A response object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public StartDataFrameTransformResponse startDataFrameTransform(StartDataFrameTransformRequest request, RequestOptions options) + public StartTransformResponse startTransform(StartTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::startDataFrameTransform, + TransformRequestConverters::startTransform, options, - StartDataFrameTransformResponse::fromXContent, + StartTransformResponse::fromXContent, Collections.emptySet()); } @@ -292,12 +292,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable startDataFrameTransformAsync(StartDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable startTransformAsync(StartTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::startDataFrameTransform, + TransformRequestConverters::startTransform, options, - StartDataFrameTransformResponse::fromXContent, + StartTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -314,12 +314,12 @@ public final class DataFrameClient { * @return A response object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public StopDataFrameTransformResponse stopDataFrameTransform(StopDataFrameTransformRequest request, RequestOptions options) + public StopTransformResponse stopTransform(StopTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::stopDataFrameTransform, + TransformRequestConverters::stopTransform, options, - StopDataFrameTransformResponse::fromXContent, + 
StopTransformResponse::fromXContent, Collections.emptySet()); } @@ -334,12 +334,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable stopDataFrameTransformAsync(StopDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable stopTransformAsync(StopTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::stopDataFrameTransform, + TransformRequestConverters::stopTransform, options, - StopDataFrameTransformResponse::fromXContent, + StopTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -353,15 +353,15 @@ public final class DataFrameClient { * * @param request The get transform request * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return An GetDataFrameTransformResponse containing the requested transforms + * @return An GetTransformResponse containing the requested transforms * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public GetDataFrameTransformResponse getDataFrameTransform(GetDataFrameTransformRequest request, RequestOptions options) + public GetTransformResponse getTransform(GetTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransform, + TransformRequestConverters::getTransform, options, - GetDataFrameTransformResponse::fromXContent, + GetTransformResponse::fromXContent, Collections.emptySet()); } @@ -376,12 +376,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable getDataFrameTransformAsync(GetDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable getTransformAsync(GetTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransform, + TransformRequestConverters::getTransform, options, - GetDataFrameTransformResponse::fromXContent, + GetTransformResponse::fromXContent, listener, Collections.emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java similarity index 79% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java index ab8c79a1784..49d347ae2b6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java @@ -24,29 +24,29 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import 
org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformRequest; import org.elasticsearch.common.Strings; import java.io.IOException; import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.RequestConverters.createEntity; -import static org.elasticsearch.client.transform.DeleteDataFrameTransformRequest.FORCE; -import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH; -import static org.elasticsearch.client.transform.PutDataFrameTransformRequest.DEFER_VALIDATION; +import static org.elasticsearch.client.transform.DeleteTransformRequest.FORCE; +import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH; +import static org.elasticsearch.client.transform.PutTransformRequest.DEFER_VALIDATION; -final class DataFrameRequestConverters { +final class TransformRequestConverters { - private DataFrameRequestConverters() {} + private TransformRequestConverters() {} - static Request putDataFrameTransform(PutDataFrameTransformRequest putRequest) throws IOException { + static Request putTransform(PutTransformRequest putRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(putRequest.getConfig().getId()) @@ -59,7 +59,7 @@ final class DataFrameRequestConverters { return request; } - static Request updateDataFrameTransform(UpdateDataFrameTransformRequest updateDataFrameTransformRequest) throws IOException { + static Request updateTransform(UpdateTransformRequest updateDataFrameTransformRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(updateDataFrameTransformRequest.getId()) @@ -73,7 +73,7 @@ final class DataFrameRequestConverters { return request; } - static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) { + static Request getTransform(GetTransformRequest getRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId())) @@ -91,7 +91,7 @@ final class DataFrameRequestConverters { return request; } - static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest deleteRequest) { + static Request deleteTransform(DeleteTransformRequest deleteRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") 
.addPathPart(deleteRequest.getId()) @@ -103,7 +103,7 @@ final class DataFrameRequestConverters { return request; } - static Request startDataFrameTransform(StartDataFrameTransformRequest startRequest) { + static Request startTransform(StartTransformRequest startRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(startRequest.getId()) @@ -118,7 +118,7 @@ final class DataFrameRequestConverters { return request; } - static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) { + static Request stopTransform(StopTransformRequest stopRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(stopRequest.getId()) @@ -139,7 +139,7 @@ final class DataFrameRequestConverters { return request; } - static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException { + static Request previewTransform(PreviewTransformRequest previewRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms", "_preview") .build(); @@ -148,7 +148,7 @@ final class DataFrameRequestConverters { return request; } - static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest statsRequest) { + static Request getTransformStats(GetTransformStatsRequest statsRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(statsRequest.getId()) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java similarity index 85% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java index d28779efd1d..7eaeb1435b6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java @@ -27,16 +27,16 @@ import java.util.Optional; /** - * Request to delete a data frame transform + * Request to delete a transform */ -public class DeleteDataFrameTransformRequest implements Validatable { +public class DeleteTransformRequest implements Validatable { public static final String FORCE = "force"; private final String id; private Boolean force; - public DeleteDataFrameTransformRequest(String id) { + public DeleteTransformRequest(String id) { this.id = id; } @@ -56,7 +56,7 @@ public class DeleteDataFrameTransformRequest implements Validatable { public Optional validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -77,7 +77,7 @@ public class DeleteDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - DeleteDataFrameTransformRequest other = (DeleteDataFrameTransformRequest) obj; + DeleteTransformRequest other = (DeleteTransformRequest) obj; return Objects.equals(id, 
other.id) && Objects.equals(force, other.force); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java similarity index 80% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java index 3c442b65010..f0238083f6a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java @@ -28,22 +28,22 @@ import java.util.List; import java.util.Objects; import java.util.Optional; -public class GetDataFrameTransformRequest implements Validatable { +public class GetTransformRequest implements Validatable { public static final String ALLOW_NO_MATCH = "allow_no_match"; /** - * Helper method to create a request that will get ALL Data Frame Transforms - * @return new {@link GetDataFrameTransformRequest} object for the id "_all" + * Helper method to create a request that will get ALL Transforms + * @return new {@link GetTransformRequest} object for the id "_all" */ - public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() { - return new GetDataFrameTransformRequest("_all"); + public static GetTransformRequest getAllTransformRequest() { + return new GetTransformRequest("_all"); } private final List ids; private PageParams pageParams; private Boolean allowNoMatch; - public GetDataFrameTransformRequest(String... ids) { + public GetTransformRequest(String... ids) { this.ids = Arrays.asList(ids); } @@ -71,7 +71,7 @@ public class GetDataFrameTransformRequest implements Validatable { public Optional validate() { if (ids == null || ids.isEmpty()) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -92,7 +92,7 @@ public class GetDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj; + GetTransformRequest other = (GetTransformRequest) obj; return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams) && Objects.equals(allowNoMatch, other.allowNoMatch); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java similarity index 77% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java index e1ca2df503d..de2418b3852 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transform; -import 
org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -31,7 +31,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class GetDataFrameTransformResponse { +public class GetTransformResponse { public static final ParseField TRANSFORMS = new ParseField("transforms"); public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms"); @@ -42,30 +42,30 @@ public class GetDataFrameTransformResponse { new ConstructingObjectParser<>("invalid_transforms", true, args -> new InvalidTransforms((List) args[0])); @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_transform", true, args -> new GetDataFrameTransformResponse( - (List) args[0], (int) args[1], (InvalidTransforms) args[2])); + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_transform", true, args -> new GetTransformResponse( + (List) args[0], (int) args[1], (InvalidTransforms) args[2])); static { // Discard the count field which is the size of the transforms array INVALID_TRANSFORMS_PARSER.declareInt((a, b) -> {}, COUNT); INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS); - PARSER.declareObjectArray(constructorArg(), DataFrameTransformConfig.PARSER::apply, TRANSFORMS); + PARSER.declareObjectArray(constructorArg(), TransformConfig.PARSER::apply, TRANSFORMS); PARSER.declareInt(constructorArg(), COUNT); PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS); } - public static GetDataFrameTransformResponse fromXContent(final XContentParser parser) { - return GetDataFrameTransformResponse.PARSER.apply(parser, null); + public static GetTransformResponse fromXContent(final XContentParser parser) { + return GetTransformResponse.PARSER.apply(parser, null); } - private List transformConfigurations; + private List transformConfigurations; private int count; private InvalidTransforms invalidTransforms; - public GetDataFrameTransformResponse(List transformConfigurations, - int count, - @Nullable InvalidTransforms invalidTransforms) { + public GetTransformResponse(List transformConfigurations, + int count, + @Nullable InvalidTransforms invalidTransforms) { this.transformConfigurations = transformConfigurations; this.count = count; this.invalidTransforms = invalidTransforms; @@ -80,7 +80,7 @@ public class GetDataFrameTransformResponse { return count; } - public List getTransformConfigurations() { + public List getTransformConfigurations() { return transformConfigurations; } @@ -99,7 +99,7 @@ public class GetDataFrameTransformResponse { return false; } - final GetDataFrameTransformResponse that = (GetDataFrameTransformResponse) other; + final GetTransformResponse that = (GetTransformResponse) other; return Objects.equals(this.transformConfigurations, that.transformConfigurations) && Objects.equals(this.count, that.count) && Objects.equals(this.invalidTransforms, that.invalidTransforms); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java similarity index 88% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java index 579dd715cbc..d226d5c67bb 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java @@ -26,12 +26,12 @@ import org.elasticsearch.client.core.PageParams; import java.util.Objects; import java.util.Optional; -public class GetDataFrameTransformStatsRequest implements Validatable { +public class GetTransformStatsRequest implements Validatable { private final String id; private PageParams pageParams; private Boolean allowNoMatch; - public GetDataFrameTransformStatsRequest(String id) { + public GetTransformStatsRequest(String id) { this.id = id; } @@ -59,7 +59,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable { public Optional validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -80,7 +80,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj; + GetTransformStatsRequest other = (GetTransformStatsRequest) obj; return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams) && Objects.equals(allowNoMatch, other.allowNoMatch); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java similarity index 78% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java index a1c4fc81974..4debe2ffd75 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java @@ -21,7 +21,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -34,19 +34,19 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class GetDataFrameTransformStatsResponse { +public class GetTransformStatsResponse { public static final ParseField 
TRANSFORMS = new ParseField("transforms"); public static final ParseField COUNT = new ParseField("count"); @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_transform_stats_response", true, - args -> new GetDataFrameTransformStatsResponse((List) args[0], + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_transform_stats_response", true, + args -> new GetTransformStatsResponse((List) args[0], (List) args[1], (List) args[2])); static { - PARSER.declareObjectArray(constructorArg(), DataFrameTransformStats.PARSER::apply, TRANSFORMS); + PARSER.declareObjectArray(constructorArg(), TransformStats.PARSER::apply, TRANSFORMS); // Discard the count field which is the size of the transforms array PARSER.declareInt((a, b) -> {}, COUNT); PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), @@ -55,15 +55,15 @@ public class GetDataFrameTransformStatsResponse { AcknowledgedTasksResponse.NODE_FAILURES); } - public static GetDataFrameTransformStatsResponse fromXContent(final XContentParser parser) { - return GetDataFrameTransformStatsResponse.PARSER.apply(parser, null); + public static GetTransformStatsResponse fromXContent(final XContentParser parser) { + return GetTransformStatsResponse.PARSER.apply(parser, null); } - private final List transformsStats; + private final List transformsStats; private final List taskFailures; private final List nodeFailures; - public GetDataFrameTransformStatsResponse(List transformsStats, + public GetTransformStatsResponse(List transformsStats, @Nullable List taskFailures, @Nullable List nodeFailures) { this.transformsStats = transformsStats; @@ -71,7 +71,7 @@ public class GetDataFrameTransformStatsResponse { this.nodeFailures = nodeFailures == null ? 
Collections.emptyList() : Collections.unmodifiableList(nodeFailures); } - public List getTransformsStats() { + public List getTransformsStats() { return transformsStats; } @@ -98,7 +98,7 @@ public class GetDataFrameTransformStatsResponse { return false; } - final GetDataFrameTransformStatsResponse that = (GetDataFrameTransformStatsResponse) other; + final GetTransformStatsResponse that = (GetTransformStatsResponse) other; return Objects.equals(this.transformsStats, that.transformsStats) && Objects.equals(this.nodeFailures, that.nodeFailures) && Objects.equals(this.taskFailures, that.taskFailures); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java similarity index 80% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java index ab06ebfa4c9..4eba5c60aa7 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -30,15 +30,15 @@ import java.io.IOException; import java.util.Objects; import java.util.Optional; -public class PreviewDataFrameTransformRequest implements ToXContentObject, Validatable { +public class PreviewTransformRequest implements ToXContentObject, Validatable { - private final DataFrameTransformConfig config; + private final TransformConfig config; - public PreviewDataFrameTransformRequest(DataFrameTransformConfig config) { + public PreviewTransformRequest(TransformConfig config) { this.config = config; } - public DataFrameTransformConfig getConfig() { + public TransformConfig getConfig() { return config; } @@ -51,11 +51,11 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid public Optional validate() { ValidationException validationException = new ValidationException(); if (config == null) { - validationException.addValidationError("preview requires a non-null data frame config"); + validationException.addValidationError("preview requires a non-null transform config"); return Optional.of(validationException); } else { if (config.getSource() == null) { - validationException.addValidationError("data frame transform source cannot be null"); + validationException.addValidationError("transform source cannot be null"); } } @@ -79,7 +79,7 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid if (getClass() != obj.getClass()) { return false; } - PreviewDataFrameTransformRequest other = (PreviewDataFrameTransformRequest) obj; + PreviewTransformRequest other = (PreviewTransformRequest) obj; return Objects.equals(config, other.config); } } diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java similarity index 80% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java index 9f7cd2b313a..215d529f949 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java @@ -26,23 +26,23 @@ import java.util.List; import java.util.Map; import java.util.Objects; -public class PreviewDataFrameTransformResponse { +public class PreviewTransformResponse { private static final String PREVIEW = "preview"; private static final String MAPPINGS = "mappings"; @SuppressWarnings("unchecked") - public static PreviewDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException { + public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException { Map previewMap = parser.mapOrdered(); Object previewDocs = previewMap.get(PREVIEW); Object mappings = previewMap.get(MAPPINGS); - return new PreviewDataFrameTransformResponse((List>) previewDocs, (Map) mappings); + return new PreviewTransformResponse((List>) previewDocs, (Map) mappings); } private List> docs; private Map mappings; - public PreviewDataFrameTransformResponse(List> docs, Map mappings) { + public PreviewTransformResponse(List> docs, Map mappings) { this.docs = docs; this.mappings = mappings; } @@ -65,7 +65,7 @@ public class PreviewDataFrameTransformResponse { return false; } - PreviewDataFrameTransformResponse other = (PreviewDataFrameTransformResponse) obj; + PreviewTransformResponse other = (PreviewTransformResponse) obj; return Objects.equals(other.docs, docs) && Objects.equals(other.mappings, mappings); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java similarity index 79% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java index e948ae53e0d..caff901e5d6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -29,17 +29,17 @@ import java.io.IOException; import java.util.Objects; import java.util.Optional; -public class PutDataFrameTransformRequest implements ToXContentObject, Validatable { +public class PutTransformRequest implements ToXContentObject, Validatable { public static final String DEFER_VALIDATION = 
"defer_validation"; - private final DataFrameTransformConfig config; + private final TransformConfig config; private Boolean deferValidation; - public PutDataFrameTransformRequest(DataFrameTransformConfig config) { + public PutTransformRequest(TransformConfig config) { this.config = config; } - public DataFrameTransformConfig getConfig() { + public TransformConfig getConfig() { return config; } @@ -60,17 +60,17 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab public Optional validate() { ValidationException validationException = new ValidationException(); if (config == null) { - validationException.addValidationError("put requires a non-null data frame config"); + validationException.addValidationError("put requires a non-null transform config"); return Optional.of(validationException); } else { if (config.getId() == null) { - validationException.addValidationError("data frame transform id cannot be null"); + validationException.addValidationError("transform id cannot be null"); } if (config.getSource() == null) { - validationException.addValidationError("data frame transform source cannot be null"); + validationException.addValidationError("transform source cannot be null"); } if (config.getDestination() == null) { - validationException.addValidationError("data frame transform destination cannot be null"); + validationException.addValidationError("transform destination cannot be null"); } } @@ -99,7 +99,7 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab if (getClass() != obj.getClass()) { return false; } - PutDataFrameTransformRequest other = (PutDataFrameTransformRequest) obj; + PutTransformRequest other = (PutTransformRequest) obj; return Objects.equals(config, other.config); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java similarity index 84% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java index 208bebf58ab..cda35903af4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java @@ -26,16 +26,16 @@ import org.elasticsearch.common.unit.TimeValue; import java.util.Objects; import java.util.Optional; -public class StartDataFrameTransformRequest implements Validatable { +public class StartTransformRequest implements Validatable { private final String id; private TimeValue timeout; - public StartDataFrameTransformRequest(String id) { + public StartTransformRequest(String id) { this.id = id; } - public StartDataFrameTransformRequest(String id, TimeValue timeout) { + public StartTransformRequest(String id, TimeValue timeout) { this.id = id; this.timeout = timeout; } @@ -56,7 +56,7 @@ public class StartDataFrameTransformRequest implements Validatable { public Optional validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -77,7 
+77,7 @@ public class StartDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - StartDataFrameTransformRequest other = (StartDataFrameTransformRequest) obj; + StartTransformRequest other = (StartTransformRequest) obj; return Objects.equals(this.id, other.id) && Objects.equals(this.timeout, other.timeout); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java similarity index 68% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java index 9a96d2de7ae..b945b965adf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java @@ -28,20 +28,20 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.List; -public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse { +public class StartTransformResponse extends AcknowledgedTasksResponse { private static final String ACKNOWLEDGED = "acknowledged"; - private static final ConstructingObjectParser PARSER = - AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new, + private static final ConstructingObjectParser PARSER = + AcknowledgedTasksResponse.generateParser("start_transform_response", StartTransformResponse::new, ACKNOWLEDGED); - public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException { + public static StartTransformResponse fromXContent(final XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List taskFailures, - @Nullable List nodeFailures) { + public StartTransformResponse(boolean acknowledged, @Nullable List taskFailures, + @Nullable List nodeFailures) { super(acknowledged, taskFailures, nodeFailures); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java similarity index 87% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java index 3a662c2caec..33fc356c8da 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java @@ -26,20 +26,20 @@ import org.elasticsearch.common.unit.TimeValue; import java.util.Objects; import java.util.Optional; -public class StopDataFrameTransformRequest implements Validatable { +public class StopTransformRequest implements Validatable { private final String id; private Boolean waitForCompletion; private TimeValue timeout; private Boolean allowNoMatch; - public StopDataFrameTransformRequest(String id) { + public StopTransformRequest(String id) { this.id = id; 
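        // Only the id is required by this constructor; waitForCompletion and
        // timeout are left null by the assignments just below, so the
        // server-side defaults apply. The three-argument constructor that
        // follows lets callers set both explicitly.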
waitForCompletion = null; timeout = null; } - public StopDataFrameTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) { + public StopTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) { this.id = id; this.waitForCompletion = waitForCompletion; this.timeout = timeout; @@ -77,7 +77,7 @@ public class StopDataFrameTransformRequest implements Validatable { public Optional validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -98,7 +98,7 @@ public class StopDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj; + StopTransformRequest other = (StopTransformRequest) obj; return Objects.equals(this.id, other.id) && Objects.equals(this.waitForCompletion, other.waitForCompletion) && Objects.equals(this.timeout, other.timeout) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java similarity index 67% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java index 3993679fba8..b8a60a59117 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java @@ -28,19 +28,19 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.List; -public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse { +public class StopTransformResponse extends AcknowledgedTasksResponse { private static final String ACKNOWLEDGED = "acknowledged"; - private static final ConstructingObjectParser PARSER = AcknowledgedTasksResponse - .generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED); + private static final ConstructingObjectParser PARSER = AcknowledgedTasksResponse + .generateParser("stop_transform_response", StopTransformResponse::new, ACKNOWLEDGED); - public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException { + public static StopTransformResponse fromXContent(final XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List taskFailures, - @Nullable List nodeFailures) { + public StopTransformResponse(boolean acknowledged, @Nullable List taskFailures, + @Nullable List nodeFailures) { super(acknowledged, taskFailures, nodeFailures); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java similarity index 95% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java rename to 
client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java index bf4d5b4ade1..2326d6f658c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java @@ -28,7 +28,7 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider; import java.util.Arrays; import java.util.List; -public class DataFrameNamedXContentProvider implements NamedXContentProvider { +public class TransformNamedXContentProvider implements NamedXContentProvider { @Override public List getNamedXContentParsers() { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java similarity index 82% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java index d4fe836db0f..e066d52fed5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -29,18 +29,18 @@ import java.io.IOException; import java.util.Objects; import java.util.Optional; -public class UpdateDataFrameTransformRequest implements ToXContentObject, Validatable { +public class UpdateTransformRequest implements ToXContentObject, Validatable { - private final DataFrameTransformConfigUpdate update; + private final TransformConfigUpdate update; private final String id; private Boolean deferValidation; - public UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate update, String id) { + public UpdateTransformRequest(TransformConfigUpdate update, String id) { this.update = update; this.id = id; } - public DataFrameTransformConfigUpdate getUpdate() { + public TransformConfigUpdate getUpdate() { return update; } @@ -65,10 +65,10 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida public Optional validate() { ValidationException validationException = new ValidationException(); if (update == null) { - validationException.addValidationError("put requires a non-null data frame config update object"); + validationException.addValidationError("put requires a non-null transform config update object"); } if (id == null) { - validationException.addValidationError("data frame transform id cannot be null"); + validationException.addValidationError("transform id cannot be null"); } if (validationException.validationErrors().isEmpty()) { return Optional.empty(); @@ -95,7 +95,7 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida if (getClass() != obj.getClass()) { return false; } - UpdateDataFrameTransformRequest other = (UpdateDataFrameTransformRequest) obj; + 
UpdateTransformRequest other = (UpdateTransformRequest) obj; return Objects.equals(update, other.update) && Objects.equals(id, other.id) && Objects.equals(deferValidation, other.deferValidation); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java similarity index 69% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java index 2afc8f9f3f5..0f5d6f99e82 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java @@ -19,24 +19,24 @@ package org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.common.xcontent.XContentParser; import java.util.Objects; -public class UpdateDataFrameTransformResponse { +public class UpdateTransformResponse { - public static UpdateDataFrameTransformResponse fromXContent(final XContentParser parser) { - return new UpdateDataFrameTransformResponse(DataFrameTransformConfig.PARSER.apply(parser, null)); + public static UpdateTransformResponse fromXContent(final XContentParser parser) { + return new UpdateTransformResponse(TransformConfig.PARSER.apply(parser, null)); } - private DataFrameTransformConfig transformConfiguration; + private TransformConfig transformConfiguration; - public UpdateDataFrameTransformResponse(DataFrameTransformConfig transformConfiguration) { + public UpdateTransformResponse(TransformConfig transformConfiguration) { this.transformConfiguration = transformConfiguration; } - public DataFrameTransformConfig getTransformConfiguration() { + public TransformConfig getTransformConfiguration() { return transformConfiguration; } @@ -55,7 +55,7 @@ public class UpdateDataFrameTransformResponse { return false; } - final UpdateDataFrameTransformResponse that = (UpdateDataFrameTransformResponse) other; + final UpdateTransformResponse that = (UpdateTransformResponse) other; return Objects.equals(this.transformConfiguration, that.transformConfiguration); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java index 9dce70efe23..52d05d5f165 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java @@ -31,14 +31,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** - * Configuration containing the destination index for the {@link DataFrameTransformConfig} + * Configuration containing the destination index for the {@link TransformConfig} */ public class DestConfig implements ToXContentObject { public static final ParseField INDEX = new ParseField("index"); public static final ParseField PIPELINE = new ParseField("pipeline"); - public static final 
ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_frame_config_dest", + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("transform_config_dest", true, args -> new DestConfig((String)args[0], (String)args[1])); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java index 3f4727863a2..daee248f469 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; /** - * Object for encapsulating the desired Query for a DataFrameTransform + * Object for encapsulating the desired Query for a Transform */ public class QueryConfig implements ToXContentObject { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java index fa72bc32391..157a637040c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java @@ -35,14 +35,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona /** - * Class encapsulating all options for a {@link DataFrameTransformConfig} gathering data + * Class encapsulating all options for a {@link TransformConfig} gathering data */ public class SourceConfig implements ToXContentObject { public static final ParseField QUERY = new ParseField("query"); public static final ParseField INDEX = new ParseField("index"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_frame_config_source", + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("transform_config_source", true, args -> { @SuppressWarnings("unchecked") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java similarity index 68% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java index 1f9606fe2dc..8d376d2e191 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java @@ -28,7 +28,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DataFrameTransformCheckpointStats { +public class TransformCheckpointStats { public static final ParseField CHECKPOINT = new ParseField("checkpoint"); public static final ParseField POSITION = new ParseField("position"); @@ -36,40 +36,40 @@ public class DataFrameTransformCheckpointStats { public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis"); 
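    // The lenient parser declared just below tolerates partial responses: the
    // boolean 'true' argument to ConstructingObjectParser makes it ignore
    // unknown fields, missing numeric fields fall back to 0L, and a missing
    // position or progress object simply stays null. As an illustrative
    // sketch (the field values are hypothetical), a payload such as
    //     { "checkpoint": 7, "timestamp_millis": 1569230000000 }
    // parses to checkpoint=7, position=null, checkpointProgress=null,
    // timestampMillis=1569230000000 and timeUpperBoundMillis=0.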
     public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
 
-    public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L);
+    public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
 
     private final long checkpoint;
-    private final DataFrameIndexerPosition position;
-    private final DataFrameTransformProgress checkpointProgress;
+    private final TransformIndexerPosition position;
+    private final TransformProgress checkpointProgress;
     private final long timestampMillis;
     private final long timeUpperBoundMillis;
 
-    public static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
-            "data_frame_transform_checkpoint_stats", true, args -> {
+    public static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
+            "transform_checkpoint_stats", true, args -> {
                 long checkpoint = args[0] == null ? 0L : (Long) args[0];
-                DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1];
-                DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2];
+                TransformIndexerPosition position = (TransformIndexerPosition) args[1];
+                TransformProgress checkpointProgress = (TransformProgress) args[2];
                 long timestamp = args[3] == null ? 0L : (Long) args[3];
                 long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
 
-                return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
+                return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
             });
 
     static {
         LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
-        LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION);
-        LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS);
+        LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, POSITION);
+        LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, CHECKPOINT_PROGRESS);
         LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
         LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
     }
 
-    public static DataFrameTransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
+    public static TransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
         return LENIENT_PARSER.parse(parser, null);
     }
 
-    public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position,
-                                             final DataFrameTransformProgress checkpointProgress, final long timestampMillis,
-                                             final long timeUpperBoundMillis) {
+    public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
+                                    final TransformProgress checkpointProgress, final long timestampMillis,
+                                    final long timeUpperBoundMillis) {
         this.checkpoint = checkpoint;
         this.position = position;
         this.checkpointProgress = checkpointProgress;
@@ -81,11 +81,11 @@ public class DataFrameTransformCheckpointStats {
         return checkpoint;
     }
 
-    public DataFrameIndexerPosition getPosition() {
+    public TransformIndexerPosition getPosition() {
         return position;
     }
 
-    public DataFrameTransformProgress getCheckpointProgress() {
+    public TransformProgress getCheckpointProgress() {
         return checkpointProgress;
     }
 
@@ -112,7 +112,7 @@ public class DataFrameTransformCheckpointStats {
             return false;
         }
 
-        DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
+        TransformCheckpointStats that = (TransformCheckpointStats) other;
 
         return this.checkpoint == that.checkpoint
             && Objects.equals(this.position, that.position)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
similarity index 71%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
index 79d02c523ff..d5ba3643844 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
@@ -29,37 +29,37 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.time.Instant;
 import java.util.Objects;
 
-public class DataFrameTransformCheckpointingInfo {
+public class TransformCheckpointingInfo {
 
     public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
     public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
     public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
     public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
 
-    private final DataFrameTransformCheckpointStats last;
-    private final DataFrameTransformCheckpointStats next;
+    private final TransformCheckpointStats last;
+    private final TransformCheckpointStats next;
     private final long operationsBehind;
     private final Instant changesLastDetectedAt;
 
-    private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
+    private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
             new ConstructingObjectParser<>(
-                    "data_frame_transform_checkpointing_info",
+                    "transform_checkpointing_info",
                     true,
                     a -> {
                         long behind = a[2] == null ? 0L : (Long) a[2];
                         Instant changesLastDetectedAt = (Instant)a[3];
-                        return new DataFrameTransformCheckpointingInfo(
-                                a[0] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[0],
-                                a[1] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[1],
+                        return new TransformCheckpointingInfo(
+                                a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
+                                a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
                                 behind,
                                 changesLastDetectedAt);
                     });
 
     static {
         LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
-                (p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
+                (p, c) -> TransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
         LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
-                (p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
+                (p, c) -> TransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
         LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
         LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
                 p -> TimeUtil.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
@@ -67,21 +67,21 @@ public class DataFrameTransformCheckpointingInfo {
                 ObjectParser.ValueType.VALUE);
     }
 
-    public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
-                                               DataFrameTransformCheckpointStats next,
-                                               long operationsBehind,
-                                               Instant changesLastDetectedAt) {
+    public TransformCheckpointingInfo(TransformCheckpointStats last,
+                                      TransformCheckpointStats next,
+                                      long operationsBehind,
+                                      Instant changesLastDetectedAt) {
         this.last = Objects.requireNonNull(last);
         this.next = Objects.requireNonNull(next);
         this.operationsBehind = operationsBehind;
         this.changesLastDetectedAt = changesLastDetectedAt;
     }
 
-    public DataFrameTransformCheckpointStats getLast() {
+    public TransformCheckpointStats getLast() {
         return last;
     }
 
-    public DataFrameTransformCheckpointStats getNext() {
+    public TransformCheckpointStats getNext() {
         return next;
     }
 
@@ -94,7 +94,7 @@ public class DataFrameTransformCheckpointingInfo {
         return changesLastDetectedAt;
     }
 
-    public static DataFrameTransformCheckpointingInfo fromXContent(XContentParser p) {
+    public static TransformCheckpointingInfo fromXContent(XContentParser p) {
         return LENIENT_PARSER.apply(p, null);
     }
 
@@ -113,7 +113,7 @@ public class DataFrameTransformCheckpointingInfo {
             return false;
         }
 
-        DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
+        TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
 
         return Objects.equals(this.last, that.last) &&
             Objects.equals(this.next, that.next) &&
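For orientation, both checkpoint classes keep their lenient fromXContent entry points, so callers only need to swap class names. A minimal parsing sketch, assuming the usual HLRC XContent setup; the JSON payload and values are illustrative (the field names follow the ParseFields above):

    import org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    public class CheckpointingInfoParseSketch {
        public static void main(String[] args) throws Exception {
            // "last" and "operations_behind" are ParseFields declared above.
            String json = "{\"last\":{\"checkpoint\":5},\"operations_behind\":42}";
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                TransformCheckpointingInfo info = TransformCheckpointingInfo.fromXContent(parser);
                // The parser substitutes TransformCheckpointStats.EMPTY when "last" or "next" is absent.
                System.out.println("last checkpoint: " + info.getLast().getCheckpoint());
            }
        }
    }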
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
similarity index 85%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
index d3abc73e6f3..ff740cfcf24 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
@@ -40,7 +40,7 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameTransformConfig implements ToXContentObject {
+public class TransformConfig implements ToXContentObject {
 
     public static final ParseField ID = new ParseField("id");
     public static final ParseField SOURCE = new ParseField("source");
@@ -63,8 +63,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
     private final Version transformVersion;
     private final Instant createTime;
 
-    public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
-        new ConstructingObjectParser<>("data_frame_transform", true,
+    public static final ConstructingObjectParser<TransformConfig, Void> PARSER =
+        new ConstructingObjectParser<>("transform", true,
             (args) -> {
                 String id = (String) args[0];
                 SourceConfig source = (SourceConfig) args[1];
@@ -75,7 +75,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
                 String description = (String)args[6];
                 Instant createTime = (Instant)args[7];
                 String transformVersion = (String)args[8];
-                return new DataFrameTransformConfig(id,
+                return new TransformConfig(id,
                     source,
                     dest,
                     frequency,
@@ -109,34 +109,34 @@ public class DataFrameTransformConfig implements ToXContentObject {
     }
 
-    public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
+    public static TransformConfig fromXContent(final XContentParser parser) {
         return PARSER.apply(parser, null);
     }
 
     /**
-     * Helper method for previewing a data frame transform configuration
+     * Helper method for previewing a transform configuration
      *
-     * The DataFrameTransformConfig returned from this method should only be used for previewing the resulting data.
+     * The TransformConfig returned from this method should only be used for previewing the resulting data.
      *
-     * A new, valid, DataFrameTransformConfig with an appropriate destination and ID will have to be constructed to create
+     * A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
      * the transform.
      * @param source Source configuration for gathering the data
     * @param pivotConfig Pivot config to preview
-     * @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
+     * @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
      */
-    public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
-        return new DataFrameTransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
+    public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
+        return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
     }
 
-    DataFrameTransformConfig(final String id,
-                             final SourceConfig source,
-                             final DestConfig dest,
-                             final TimeValue frequency,
-                             final SyncConfig syncConfig,
-                             final PivotConfig pivotConfig,
-                             final String description,
-                             final Instant createTime,
-                             final String version) {
+    TransformConfig(final String id,
+                    final SourceConfig source,
+                    final DestConfig dest,
+                    final TimeValue frequency,
+                    final SyncConfig syncConfig,
+                    final PivotConfig pivotConfig,
+                    final String description,
+                    final Instant createTime,
+                    final String version) {
         this.id = id;
         this.source = source;
         this.dest = dest;
@@ -231,7 +231,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
             return false;
         }
 
-        final DataFrameTransformConfig that = (DataFrameTransformConfig) other;
+        final TransformConfig that = (TransformConfig) other;
 
         return Objects.equals(this.id, that.id)
             && Objects.equals(this.source, that.source)
@@ -303,8 +303,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
             return this;
         }
 
-        public DataFrameTransformConfig build() {
-            return new DataFrameTransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
+        public TransformConfig build() {
+            return new TransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
         }
     }
 }
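Because forPreview deliberately leaves the id and destination null, it is easy to misuse; the sketch below shows the intended two-step flow. The GroupConfig and TermsGroupSource builder calls mirror the helper in TransformIT later in this patch; the PivotConfig, SourceConfig, and DestConfig setter names and all index names are illustrative assumptions:

    import org.elasticsearch.client.transform.transforms.DestConfig;
    import org.elasticsearch.client.transform.transforms.SourceConfig;
    import org.elasticsearch.client.transform.transforms.TransformConfig;
    import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
    import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
    import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.AggregatorFactories;

    public class TransformConfigSketch {
        public static TransformConfig previewThenCreate() {
            GroupConfig groups = GroupConfig.builder()
                .groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build())
                .build();
            AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
            aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
            PivotConfig pivot = PivotConfig.builder().setGroups(groups).setAggregations(aggs).build();
            SourceConfig source = SourceConfig.builder().setIndex("reviews").build();

            // Step 1: preview only -- id and destination are null by design.
            TransformConfig previewConfig = TransformConfig.forPreview(source, pivot);
            assert previewConfig.getId() == null;

            // Step 2: a complete config with an id and dest is required to create the transform.
            return TransformConfig.builder()
                .setId("reviews-by-user")
                .setSource(source)
                .setDest(DestConfig.builder().setIndex("reviews-by-user-dest").build())
                .setPivotConfig(pivot)
                .setDescription("average rating per user")
                .build();
        }
    }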
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
similarity index 72%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
index 945e8b82116..241c578dbad 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
@@ -34,30 +34,30 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 /**
- * This class holds the mutable configuration items for a data frame transform
+ * This class holds the mutable configuration items for a transform
  */
-public class DataFrameTransformConfigUpdate implements ToXContentObject {
+public class TransformConfigUpdate implements ToXContentObject {
 
-    public static final String NAME = "data_frame_transform_config_update";
-    private static final ConstructingObjectParser<DataFrameTransformConfigUpdate, Void> PARSER = new ConstructingObjectParser<>(NAME,
+    public static final String NAME = "transform_config_update";
+    private static final ConstructingObjectParser<TransformConfigUpdate, Void> PARSER = new ConstructingObjectParser<>(NAME,
         false,
         (args) -> {
             SourceConfig source = (SourceConfig) args[0];
             DestConfig dest = (DestConfig) args[1];
             TimeValue frequency = args[2] == null ?
                 null :
-                TimeValue.parseTimeValue((String) args[2], DataFrameTransformConfig.FREQUENCY.getPreferredName());
+                TimeValue.parseTimeValue((String) args[2], TransformConfig.FREQUENCY.getPreferredName());
             SyncConfig syncConfig = (SyncConfig) args[3];
             String description = (String) args[4];
-            return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
+            return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
         });
 
     static {
-        PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), DataFrameTransformConfig.SOURCE);
-        PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DataFrameTransformConfig.DEST);
-        PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.FREQUENCY);
-        PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), DataFrameTransformConfig.SYNC);
-        PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.DESCRIPTION);
+        PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), TransformConfig.SOURCE);
+        PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), TransformConfig.DEST);
+        PARSER.declareString(optionalConstructorArg(), TransformConfig.FREQUENCY);
+        PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformConfig.SYNC);
+        PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION);
     }
 
     private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
@@ -74,11 +74,11 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
     private final SyncConfig syncConfig;
     private final String description;
 
-    public DataFrameTransformConfigUpdate(final SourceConfig source,
-                                          final DestConfig dest,
-                                          final TimeValue frequency,
-                                          final SyncConfig syncConfig,
-                                          final String description){
+    public TransformConfigUpdate(final SourceConfig source,
+                                 final DestConfig dest,
+                                 final TimeValue frequency,
+                                 final SyncConfig syncConfig,
+                                 final String description) {
         this.source = source;
         this.dest = dest;
         this.frequency = frequency;
@@ -111,21 +111,21 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
     public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
         builder.startObject();
         if (source != null) {
-            builder.field(DataFrameTransformConfig.SOURCE.getPreferredName(), source);
+            builder.field(TransformConfig.SOURCE.getPreferredName(), source);
         }
         if (dest != null) {
-            builder.field(DataFrameTransformConfig.DEST.getPreferredName(), dest);
+            builder.field(TransformConfig.DEST.getPreferredName(), dest);
         }
         if (frequency != null) {
-            builder.field(DataFrameTransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
+            builder.field(TransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
         }
         if (syncConfig != null) {
-            builder.startObject(DataFrameTransformConfig.SYNC.getPreferredName());
+            builder.startObject(TransformConfig.SYNC.getPreferredName());
             builder.field(syncConfig.getName(), syncConfig);
             builder.endObject();
         }
         if (description != null) {
-            builder.field(DataFrameTransformConfig.DESCRIPTION.getPreferredName(), description);
+            builder.field(TransformConfig.DESCRIPTION.getPreferredName(), description);
         }
         builder.endObject();
         return builder;
@@ -141,7 +141,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
             return false;
         }
 
-        final DataFrameTransformConfigUpdate that = (DataFrameTransformConfigUpdate) other;
+        final TransformConfigUpdate that = (TransformConfigUpdate) other;
 
         return Objects.equals(this.source, that.source)
             && Objects.equals(this.dest, that.dest)
@@ -164,7 +164,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
         return new Builder();
     }
 
-    public static DataFrameTransformConfigUpdate fromXContent(final XContentParser parser) {
+    public static TransformConfigUpdate fromXContent(final XContentParser parser) {
         return PARSER.apply(parser, null);
     }
 
@@ -201,8 +201,8 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
             return this;
         }
 
-        public DataFrameTransformConfigUpdate build() {
-            return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
+        public TransformConfigUpdate build() {
+            return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
        }
    }
}
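The update object serializes only the fields that were set, which is what makes partial updates safe. A short sketch; setDescription is the setter exercised by TransformIT later in this patch, and the other mutable items above have analogous setters:

    import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;

    // Only non-null fields are emitted by toXContent (see the null checks above),
    // so this update changes the description and nothing else.
    TransformConfigUpdate update = TransformConfigUpdate.builder()
        .setDescription("my new description")
        .build();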
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java
similarity index 89%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java
index 6141f77c3b0..c312666be89 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java
@@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
  * indexer_position: the position of the indexer querying the source
  * bucket_position: the position used for identifying changes
  */
-public class DataFrameIndexerPosition {
+public class TransformIndexerPosition {
     public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
     public static final ParseField BUCKET_POSITION = new ParseField("bucket_position");
 
@@ -45,17 +45,17 @@ public class DataFrameIndexerPosition {
     private final Map<String, Object> bucketPosition;
 
     @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<DataFrameIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
-            "data_frame_indexer_position",
+    public static final ConstructingObjectParser<TransformIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
+            "transform_indexer_position",
             true,
-            args -> new DataFrameIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
+            args -> new TransformIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
 
     static {
         PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
         PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
     }
 
-    public DataFrameIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
+    public TransformIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
         this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
         this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
     }
 
@@ -78,7 +78,7 @@ public class DataFrameIndexerPosition {
         return false;
     }
 
-        DataFrameIndexerPosition that = (DataFrameIndexerPosition) other;
+        TransformIndexerPosition that = (TransformIndexerPosition) other;
 
         return Objects.equals(this.indexerPosition, that.indexerPosition) &&
             Objects.equals(this.bucketPosition, that.bucketPosition);
 
@@ -89,7 +89,7 @@ public class DataFrameIndexerPosition {
         return Objects.hash(indexerPosition, bucketPosition);
     }
 
-    public static DataFrameIndexerPosition fromXContent(XContentParser parser) {
+    public static TransformIndexerPosition fromXContent(XContentParser parser) {
         try {
             return PARSER.parse(parser, null);
         } catch (IOException e) {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
similarity index 84%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
index 23a25c511b2..2a04c6ea45e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java
@@ -30,16 +30,16 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameIndexerTransformStats extends IndexerJobStats {
+public class TransformIndexerStats extends IndexerJobStats {
 
     static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
     static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
     static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
 
-    public static final ConstructingObjectParser<DataFrameIndexerTransformStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
+    public static final ConstructingObjectParser<TransformIndexerStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
             NAME,
             true,
-            args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2],
+            args -> new TransformIndexerStats((long) args[0], (long) args[1], (long) args[2],
                 (long) args[3], (long) args[4], (long) args[5], (long) args[6],
                 (long) args[7], (long) args[8], (long) args[9], (Double) args[10],
                 (Double) args[11], (Double) args[12]));
 
@@ -59,7 +59,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
         LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED);
     }
 
-    public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) throws IOException {
+    public static TransformIndexerStats fromXContent(XContentParser parser) throws IOException {
         return LENIENT_PARSER.parse(parser, null);
     }
 
@@ -67,11 +67,11 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
     private final double expAvgDocumentsIndexed;
     private final double expAvgDocumentsProcessed;
 
-    public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments,
-                                          long numInvocations, long indexTime, long searchTime,
-                                          long indexTotal, long searchTotal, long indexFailures, long searchFailures,
-                                          Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
-                                          Double expAvgDocumentsProcessed) {
+    public TransformIndexerStats(long numPages, long numInputDocuments, long numOuputDocuments,
+                                 long numInvocations, long indexTime, long searchTime,
+                                 long indexTotal, long searchTotal, long indexFailures, long searchFailures,
+                                 Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
+                                 Double expAvgDocumentsProcessed) {
         super(numPages, numInputDocuments, numOuputDocuments, numInvocations,
             indexTime, searchTime, indexTotal, searchTotal, indexFailures, searchFailures);
         this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs;
@@ -101,7 +101,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
             return false;
         }
 
-        DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other;
+        TransformIndexerStats that = (TransformIndexerStats) other;
 
         return Objects.equals(this.numPages, that.numPages)
             && Objects.equals(this.numInputDocuments, that.numInputDocuments)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java
similarity index 82%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java
index 73eacac8513..e3d226d8742 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java
@@ -28,7 +28,7 @@ import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameTransformProgress {
+public class TransformProgress {
 
     public static final ParseField TOTAL_DOCS = new ParseField("total_docs");
     public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining");
@@ -36,10 +36,10 @@ public class DataFrameTransformProgress {
     public static final ParseField DOCS_PROCESSED = new ParseField("docs_processed");
     public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed");
 
-    public static final ConstructingObjectParser<DataFrameTransformProgress, Void> PARSER = new ConstructingObjectParser<>(
-            "data_frame_transform_progress",
+    public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>(
+            "transform_progress",
             true,
-            a -> new DataFrameTransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
+            a -> new TransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
 
     static {
         PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS);
@@ -49,7 +49,7 @@ public class DataFrameTransformProgress {
         PARSER.declareLong(optionalConstructorArg(), DOCS_INDEXED);
     }
 
-    public static DataFrameTransformProgress fromXContent(XContentParser parser) {
+    public static TransformProgress fromXContent(XContentParser parser) {
         return PARSER.apply(parser, null);
     }
 
@@ -59,11 +59,11 @@ public class DataFrameTransformProgress {
     private final long documentsProcessed;
     private final long documentsIndexed;
 
-    public DataFrameTransformProgress(Long totalDocs,
-                                      Long remainingDocs,
-                                      Double percentComplete,
-                                      Long documentsProcessed,
-                                      Long documentsIndexed) {
+    public TransformProgress(Long totalDocs,
+                             Long remainingDocs,
+                             Double percentComplete,
+                             Long documentsProcessed,
+                             Long documentsIndexed) {
         this.totalDocs = totalDocs;
         this.remainingDocs = remainingDocs == null ? totalDocs : remainingDocs;
         this.percentComplete = percentComplete;
@@ -104,7 +104,7 @@ public class DataFrameTransformProgress {
             return false;
         }
 
-        DataFrameTransformProgress that = (DataFrameTransformProgress) other;
+        TransformProgress that = (TransformProgress) other;
         return Objects.equals(this.remainingDocs, that.remainingDocs)
             && Objects.equals(this.totalDocs, that.totalDocs)
             && Objects.equals(this.percentComplete, that.percentComplete)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java
similarity index 76%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java
index ccf2a18fb34..012b6751e59 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java
@@ -31,7 +31,7 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameTransformStats {
+public class TransformStats {
 
     public static final ParseField ID = new ParseField("id");
     public static final ParseField STATE_FIELD = new ParseField("state");
@@ -40,10 +40,10 @@ public class DataFrameTransformStats {
     public static final ParseField STATS_FIELD = new ParseField("stats");
     public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing");
 
-    public static final ConstructingObjectParser<DataFrameTransformStats, Void> PARSER = new ConstructingObjectParser<>(
+    public static final ConstructingObjectParser<TransformStats, Void> PARSER = new ConstructingObjectParser<>(
         "data_frame_transform_state_and_stats_info", true,
-        a -> new DataFrameTransformStats((String) a[0], (State) a[1], (String) a[2],
-            (NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5]));
+        a -> new TransformStats((String) a[0], (State) a[1], (String) a[2],
+            (NodeAttributes) a[3], (TransformIndexerStats) a[4], (TransformCheckpointingInfo) a[5]));
 
     static {
         PARSER.declareString(constructorArg(), ID);
@@ -51,12 +51,12 @@ public class DataFrameTransformStats {
             ObjectParser.ValueType.STRING);
         PARSER.declareString(optionalConstructorArg(), REASON_FIELD);
         PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT);
-        PARSER.declareObject(constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p), STATS_FIELD);
+        PARSER.declareObject(constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p), STATS_FIELD);
         PARSER.declareObject(optionalConstructorArg(),
-            (p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
+            (p, c) -> TransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
     }
 
-    public static DataFrameTransformStats fromXContent(XContentParser parser) throws IOException {
+    public static TransformStats fromXContent(XContentParser parser) throws IOException {
         return PARSER.parse(parser, null);
     }
 
@@ -64,11 +64,11 @@ public class DataFrameTransformStats {
     private final String reason;
     private final State state;
     private final NodeAttributes node;
-    private final DataFrameIndexerTransformStats indexerStats;
-    private final DataFrameTransformCheckpointingInfo checkpointingInfo;
+    private final TransformIndexerStats indexerStats;
+    private final TransformCheckpointingInfo checkpointingInfo;
 
-    public DataFrameTransformStats(String id, State state, String reason, NodeAttributes node, DataFrameIndexerTransformStats stats,
-                                   DataFrameTransformCheckpointingInfo checkpointingInfo) {
+    public TransformStats(String id, State state, String reason, NodeAttributes node, TransformIndexerStats stats,
+                          TransformCheckpointingInfo checkpointingInfo) {
         this.id = id;
         this.state = state;
         this.reason = reason;
@@ -93,11 +93,11 @@ public class DataFrameTransformStats {
         return node;
     }
 
-    public DataFrameIndexerTransformStats getIndexerStats() {
+    public TransformIndexerStats getIndexerStats() {
         return indexerStats;
     }
 
-    public DataFrameTransformCheckpointingInfo getCheckpointingInfo() {
+    public TransformCheckpointingInfo getCheckpointingInfo() {
         return checkpointingInfo;
     }
 
@@ -116,7 +116,7 @@ public class DataFrameTransformStats {
             return false;
         }
 
-        DataFrameTransformStats that = (DataFrameTransformStats) other;
+        TransformStats that = (TransformStats) other;
 
         return Objects.equals(this.id, that.id)
             && Objects.equals(this.state, that.state)
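TransformStats is the aggregate view: state plus the renamed TransformIndexerStats and TransformCheckpointingInfo. A reading sketch that mirrors the calls in TransformIT below; `client` is assumed to be an already-configured RestHighLevelClient, and the transform id is illustrative:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.transform.GetTransformStatsRequest;
    import org.elasticsearch.client.transform.GetTransformStatsResponse;
    import org.elasticsearch.client.transform.transforms.TransformStats;

    public class TransformStatsSketch {
        static void printState(RestHighLevelClient client) throws Exception {
            GetTransformStatsResponse response = client.transform().getTransformStats(
                new GetTransformStatsRequest("reviews-by-user"), RequestOptions.DEFAULT);
            TransformStats stats = response.getTransformsStats().get(0);
            // State, indexer stats and checkpointing info all come from the renamed getters above.
            System.out.println(stats.getId() + " state=" + stats.getState()
                + " last_checkpoint=" + stats.getCheckpointingInfo().getLast().getCheckpoint());
        }
    }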
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java
index 6c775142539..2b653f3fbf1 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java
@@ -72,7 +72,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
      * fixed_interval fixed intervals like 1h, 1m, 1d
      * calendar_interval calendar aware intervals like 1M, 1Y, ...
      *
-     * Note: data frames do not support the deprecated interval option
+     * Note: transforms do not support the deprecated interval option
      */
     public interface Interval extends ToXContentFragment {
         String getName();
diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
index d558383dd14..dfa56956edb 100644
--- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
+++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
@@ -1,4 +1,4 @@
 org.elasticsearch.client.indexlifecycle.IndexLifecycleNamedXContentProvider
 org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
 org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider
-org.elasticsearch.client.transform.DataFrameNamedXContentProvider
+org.elasticsearch.client.transform.TransformNamedXContentProvider
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java
index 24d91922980..69deeae800e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java
@@ -24,19 +24,19 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
 import org.elasticsearch.client.core.PageParams;
-import org.elasticsearch.client.transform.DataFrameNamedXContentProvider;
-import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
-import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
-import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
-import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests;
+import org.elasticsearch.client.transform.TransformNamedXContentProvider;
+import org.elasticsearch.client.transform.DeleteTransformRequest;
+import org.elasticsearch.client.transform.GetTransformRequest;
+import org.elasticsearch.client.transform.GetTransformStatsRequest;
+import org.elasticsearch.client.transform.PreviewTransformRequest;
+import org.elasticsearch.client.transform.PutTransformRequest;
+import org.elasticsearch.client.transform.StartTransformRequest;
+import org.elasticsearch.client.transform.StopTransformRequest;
+import org.elasticsearch.client.transform.UpdateTransformRequest;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfigTests;
+import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
+import org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -49,7 +49,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
-import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
+import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
@@ -62,50 +62,50 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
     protected NamedXContentRegistry xContentRegistry() {
         SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
         List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
-        namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
+        namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
         return new NamedXContentRegistry(namedXContents);
     }
 
     public void testPutDataFrameTransform() throws IOException {
-        PutDataFrameTransformRequest putRequest = new PutDataFrameTransformRequest(
-            DataFrameTransformConfigTests.randomDataFrameTransformConfig());
-        Request request = DataFrameRequestConverters.putDataFrameTransform(putRequest);
+        PutTransformRequest putRequest = new PutTransformRequest(
+            TransformConfigTests.randomTransformConfig());
+        Request request = TransformRequestConverters.putTransform(putRequest);
         assertThat(request.getParameters(), not(hasKey("defer_validation")));
         assertEquals(HttpPut.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + putRequest.getConfig().getId()));
 
         try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
-            DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null);
+            TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
             assertThat(parsedConfig, equalTo(putRequest.getConfig()));
         }
         putRequest.setDeferValidation(true);
-        request = DataFrameRequestConverters.putDataFrameTransform(putRequest);
+        request = TransformRequestConverters.putTransform(putRequest);
         assertThat(request.getParameters(), hasEntry("defer_validation", Boolean.toString(putRequest.getDeferValidation())));
     }
 
     public void testUpdateDataFrameTransform() throws IOException {
         String transformId = randomAlphaOfLength(10);
-        UpdateDataFrameTransformRequest updateDataFrameTransformRequest = new UpdateDataFrameTransformRequest(
-            DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate(),
+        UpdateTransformRequest updateDataFrameTransformRequest = new UpdateTransformRequest(
+            TransformConfigUpdateTests.randomTransformConfigUpdate(),
             transformId);
-        Request request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest);
+        Request request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
         assertThat(request.getParameters(), not(hasKey("defer_validation")));
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + transformId + "/_update"));
 
         try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
-            DataFrameTransformConfigUpdate parsedConfig = DataFrameTransformConfigUpdate.fromXContent(parser);
+            TransformConfigUpdate parsedConfig = TransformConfigUpdate.fromXContent(parser);
             assertThat(parsedConfig, equalTo(updateDataFrameTransformRequest.getUpdate()));
         }
         updateDataFrameTransformRequest.setDeferValidation(true);
-        request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest);
+        request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
         assertThat(request.getParameters(),
             hasEntry("defer_validation", Boolean.toString(updateDataFrameTransformRequest.getDeferValidation())));
     }
 
     public void testDeleteDataFrameTransform() {
-        DeleteDataFrameTransformRequest deleteRequest = new DeleteDataFrameTransformRequest("foo");
-        Request request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest);
+        DeleteTransformRequest deleteRequest = new DeleteTransformRequest("foo");
+        Request request = TransformRequestConverters.deleteTransform(deleteRequest);
         assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo"));
 
@@ -113,7 +113,7 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertThat(request.getParameters(), not(hasKey("force")));
 
         deleteRequest.setForce(true);
-        request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest);
+        request = TransformRequestConverters.deleteTransform(deleteRequest);
 
         assertThat(request.getParameters(), hasEntry("force", "true"));
     }
 
@@ -124,9 +124,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         if (randomBoolean()) {
             timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
         }
-        StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id, timeValue);
+        StartTransformRequest startRequest = new StartTransformRequest(id, timeValue);
 
-        Request request = DataFrameRequestConverters.startDataFrameTransform(startRequest);
+        Request request = TransformRequestConverters.startTransform(startRequest);
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + startRequest.getId() + "/_start"));
 
@@ -148,9 +148,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         if (randomBoolean()) {
             timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
         }
-        StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, waitForCompletion, timeValue);
+        StopTransformRequest stopRequest = new StopTransformRequest(id, waitForCompletion, timeValue);
 
-        Request request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
+        Request request = TransformRequestConverters.stopTransform(stopRequest);
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + stopRequest.getId() + "/_stop"));
 
@@ -170,27 +170,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
         stopRequest.setAllowNoMatch(randomBoolean());
-        request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
+        request = TransformRequestConverters.stopTransform(stopRequest);
         assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH)));
     }
 
     public void testPreviewDataFrameTransform() throws IOException {
-        PreviewDataFrameTransformRequest previewRequest = new PreviewDataFrameTransformRequest(
-            DataFrameTransformConfigTests.randomDataFrameTransformConfig());
-        Request request = DataFrameRequestConverters.previewDataFrameTransform(previewRequest);
+        PreviewTransformRequest previewRequest = new PreviewTransformRequest(
+            TransformConfigTests.randomTransformConfig());
+        Request request = TransformRequestConverters.previewTransform(previewRequest);
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/_preview"));
 
         try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
-            DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null);
+            TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
             assertThat(parsedConfig, equalTo(previewRequest.getConfig()));
         }
     }
 
     public void testGetDataFrameTransformStats() {
-        GetDataFrameTransformStatsRequest getStatsRequest = new GetDataFrameTransformStatsRequest("foo");
-        Request request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        GetTransformStatsRequest getStatsRequest = new GetTransformStatsRequest("foo");
+        Request request = TransformRequestConverters.getTransformStats(getStatsRequest);
         assertEquals(HttpGet.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats"));
 
@@ -200,27 +200,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
 
         getStatsRequest.setPageParams(new PageParams(0, null));
-        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        request = TransformRequestConverters.getTransformStats(getStatsRequest);
         assertThat(request.getParameters(), hasEntry("from", "0"));
         assertEquals(null, request.getParameters().get("size"));
 
         getStatsRequest.setPageParams(new PageParams(null, 50));
-        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        request = TransformRequestConverters.getTransformStats(getStatsRequest);
         assertEquals(null, request.getParameters().get("from"));
         assertThat(request.getParameters(), hasEntry("size", "50"));
 
         getStatsRequest.setPageParams(new PageParams(0, 10));
-        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        request = TransformRequestConverters.getTransformStats(getStatsRequest);
         assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
 
         getStatsRequest.setAllowNoMatch(false);
-        request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
+        request = TransformRequestConverters.getTransformStats(getStatsRequest);
         assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
     }
 
     public void testGetDataFrameTransform() {
-        GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("bar");
-        Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        GetTransformRequest getRequest = new GetTransformRequest("bar");
+        Request request = TransformRequestConverters.getTransform(getRequest);
         assertEquals(HttpGet.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/bar"));
 
@@ -230,27 +230,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
         assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
 
         getRequest.setPageParams(new PageParams(0, null));
-        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        request = TransformRequestConverters.getTransform(getRequest);
         assertThat(request.getParameters(), hasEntry("from", "0"));
         assertEquals(null, request.getParameters().get("size"));
 
         getRequest.setPageParams(new PageParams(null, 50));
-        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        request = TransformRequestConverters.getTransform(getRequest);
         assertEquals(null, request.getParameters().get("from"));
         assertThat(request.getParameters(), hasEntry("size", "50"));
 
         getRequest.setPageParams(new PageParams(0, 10));
-        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        request = TransformRequestConverters.getTransform(getRequest);
         assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
 
         getRequest.setAllowNoMatch(false);
-        request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        request = TransformRequestConverters.getTransform(getRequest);
         assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
     }
 
     public void testGetDataFrameTransform_givenMulitpleIds() {
-        GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("foo", "bar", "baz");
-        Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
+        GetTransformRequest getRequest = new GetTransformRequest("foo", "bar", "baz");
+        Request request = TransformRequestConverters.getTransform(getRequest);
         assertEquals(HttpGet.METHOD_NAME, request.getMethod());
         assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo,bar,baz"));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index 2b7f0037b74..c0939ef586e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -837,7 +837,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                    apiName.startsWith("security.") == false &&
                    apiName.startsWith("index_lifecycle.") == false &&
                    apiName.startsWith("ccr.") == false &&
-                   apiName.startsWith("data_frame") == false &&
+                   apiName.startsWith("transform.") == false &&
                    apiName.endsWith("freeze") == false &&
                    apiName.endsWith("reload_analyzers") == false &&
                    // IndicesClientIT.getIndexTemplate should be renamed "getTemplate" in version 8.0 when we
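Note that the converter tests above still pin the endpoints to /_data_frame/transforms/...: this patch renames only the Java surface, not the REST layer. At the client level the renamed lifecycle reads as below (a sketch; `client` is an existing RestHighLevelClient and `config` a TransformConfig such as the one built earlier):

    TransformClient transform = client.transform();
    transform.putTransform(new PutTransformRequest(config), RequestOptions.DEFAULT);
    transform.startTransform(new StartTransformRequest("reviews-by-user"), RequestOptions.DEFAULT);
    transform.stopTransform(new StopTransformRequest("reviews-by-user", Boolean.TRUE, null), RequestOptions.DEFAULT);
    transform.deleteTransform(new DeleteTransformRequest("reviews-by-user"), RequestOptions.DEFAULT);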
org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; -import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PreviewTransformResponse; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; +import org.elasticsearch.client.transform.transforms.TransformIndexerStats; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.SourceConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; @@ -79,7 +79,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; -public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { +public class TransformIT extends ESRestHighLevelClientTestCase { private List transformsToClean = new ArrayList<>(); @@ -147,13 +147,13 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { @After public void cleanUpTransforms() throws Exception { for (String transformId : transformsToClean) { - highLevelClient().dataFrame().stopDataFrameTransform( - new StopDataFrameTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT); + highLevelClient().transform().stopTransform( + 
new StopTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT); } for (String transformId : transformsToClean) { - highLevelClient().dataFrame().deleteDataFrameTransform( - new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + highLevelClient().transform().deleteTransform( + new DeleteTransformRequest(transformId), RequestOptions.DEFAULT); } transformsToClean = new ArrayList<>(); @@ -165,21 +165,21 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { createIndex(sourceIndex); String id = "test-crud"; - DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); + TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); - DataFrameClient client = highLevelClient().dataFrame(); - AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform, - client::putDataFrameTransformAsync); + TransformClient client = highLevelClient().transform(); + AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform, + client::putTransformAsync); assertTrue(ack.isAcknowledged()); - ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform, - client::deleteDataFrameTransformAsync); + ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform, + client::deleteTransformAsync); assertTrue(ack.isAcknowledged()); // The second delete should fail ElasticsearchStatusException deleteError = expectThrows(ElasticsearchStatusException.class, - () -> execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform, - client::deleteDataFrameTransformAsync)); + () -> execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform, + client::deleteTransformAsync)); assertThat(deleteError.getMessage(), containsString("Transform with id [test-crud] could not be found")); } @@ -188,25 +188,25 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { createIndex(sourceIndex); String id = "test-update"; - DataFrameTransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest") + TransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest") .setSyncConfig(new TimeSyncConfig("timefield", TimeValue.timeValueSeconds(60))) .build(); - DataFrameClient client = highLevelClient().dataFrame(); - AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform, - client::putDataFrameTransformAsync); + TransformClient client = highLevelClient().transform(); + AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform, + client::putTransformAsync); assertTrue(ack.isAcknowledged()); String updatedDescription = "my new description"; - DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate.builder().setDescription(updatedDescription).build(); - UpdateDataFrameTransformResponse response = execute( - new UpdateDataFrameTransformRequest(update, id), client::updateDataFrameTransform, - client::updateDataFrameTransformAsync); + TransformConfigUpdate update = TransformConfigUpdate.builder().setDescription(updatedDescription).build(); + UpdateTransformResponse response = execute( + new UpdateTransformRequest(update, id), client::updateTransform, + client::updateTransformAsync); assertThat(response.getTransformConfiguration().getDescription(), 
equalTo(updatedDescription)); ElasticsearchStatusException updateError = expectThrows(ElasticsearchStatusException.class, - () -> execute(new UpdateDataFrameTransformRequest(update, "missing-transform"), client::updateDataFrameTransform, - client::updateDataFrameTransformAsync)); + () -> execute(new UpdateTransformRequest(update, "missing-transform"), client::updateTransform, + client::updateTransformAsync)); assertThat(updateError.getMessage(), containsString("Transform with id [missing-transform] could not be found")); } @@ -214,15 +214,15 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { String sourceIndex = "missing-source-index"; String id = "test-with-defer"; - DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); - DataFrameClient client = highLevelClient().dataFrame(); - PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(transform); + TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); + TransformClient client = highLevelClient().transform(); + PutTransformRequest request = new PutTransformRequest(transform); request.setDeferValidation(true); - AcknowledgedResponse ack = execute(request, client::putDataFrameTransform, client::putDataFrameTransformAsync); + AcknowledgedResponse ack = execute(request, client::putTransform, client::putTransformAsync); assertTrue(ack.isAcknowledged()); - ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform, - client::deleteDataFrameTransformAsync); + ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform, + client::deleteTransformAsync); assertTrue(ack.isAcknowledged()); } @@ -231,14 +231,14 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { createIndex(sourceIndex); String id = "test-get"; - DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); + TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest"); - DataFrameClient client = highLevelClient().dataFrame(); + TransformClient client = highLevelClient().transform(); putTransform(transform); - GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest(id); - GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform, - client::getDataFrameTransformAsync); + GetTransformRequest getRequest = new GetTransformRequest(id); + GetTransformResponse getResponse = execute(getRequest, client::getTransform, + client::getTransformAsync); assertNull(getResponse.getInvalidTransforms()); assertThat(getResponse.getTransformConfigurations(), hasSize(1)); assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(0).getId()); @@ -248,40 +248,40 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { String sourceIndex = "transform-source"; createIndex(sourceIndex); - DataFrameClient client = highLevelClient().dataFrame(); + TransformClient client = highLevelClient().transform(); - DataFrameTransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1"); + TransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1"); putTransform(transform); transform = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2"); putTransform(transform); - GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("_all"); - 
GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
- client::getDataFrameTransformAsync);
+ GetTransformRequest getRequest = new GetTransformRequest("_all");
+ GetTransformResponse getResponse = execute(getRequest, client::getTransform,
+ client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(1).getId());
getRequest.setPageParams(new PageParams(0,1));
- getResponse = execute(getRequest, client::getDataFrameTransform,
- client::getDataFrameTransformAsync);
+ getResponse = execute(getRequest, client::getTransform,
+ client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(1));
- GetDataFrameTransformRequest getMultiple = new GetDataFrameTransformRequest("test-get-all-1", "test-get-all-2");
- getResponse = execute(getMultiple, client::getDataFrameTransform,
- client::getDataFrameTransformAsync);
+ GetTransformRequest getMultiple = new GetTransformRequest("test-get-all-1", "test-get-all-2");
+ getResponse = execute(getMultiple, client::getTransform,
+ client::getTransformAsync);
assertNull(getResponse.getInvalidTransforms());
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
}
public void testGetMissingTransform() {
- DataFrameClient client = highLevelClient().dataFrame();
+ TransformClient client = highLevelClient().transform();
ElasticsearchStatusException missingError = expectThrows(ElasticsearchStatusException.class,
- () -> execute(new GetDataFrameTransformRequest("unknown"), client::getDataFrameTransform,
- client::getDataFrameTransformAsync));
+ () -> execute(new GetTransformRequest("unknown"), client::getTransform,
+ client::getTransformAsync));
assertThat(missingError.status(), equalTo(RestStatus.NOT_FOUND));
}
@@ -290,39 +290,39 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
createIndex(sourceIndex);
String id = "test-stop-start";
- DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
+ TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
- DataFrameClient client = highLevelClient().dataFrame();
+ TransformClient client = highLevelClient().transform();
putTransform(transform);
- StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
- StartDataFrameTransformResponse startResponse =
- execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync);
+ StartTransformRequest startRequest = new StartTransformRequest(id);
+ StartTransformResponse startResponse =
+ execute(startRequest, client::startTransform, client::startTransformAsync);
assertTrue(startResponse.isAcknowledged());
assertThat(startResponse.getNodeFailures(), empty());
assertThat(startResponse.getTaskFailures(), empty());
- GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
- client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
+ GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
+ client::getTransformStats, client::getTransformStatsAsync);
assertThat(statsResponse.getTransformsStats(), hasSize(1));
- DataFrameTransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
+ TransformStats.State taskState = 
statsResponse.getTransformsStats().get(0).getState(); // Since we are non-continuous, the transform could auto-stop between being started earlier and us gathering the statistics - assertThat(taskState, oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING, - DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED)); + assertThat(taskState, oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING, + TransformStats.State.STOPPING, TransformStats.State.STOPPED)); - StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null); - StopDataFrameTransformResponse stopResponse = - execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync); + StopTransformRequest stopRequest = new StopTransformRequest(id, Boolean.TRUE, null); + StopTransformResponse stopResponse = + execute(stopRequest, client::stopTransform, client::stopTransformAsync); assertTrue(stopResponse.isAcknowledged()); assertThat(stopResponse.getNodeFailures(), empty()); assertThat(stopResponse.getTaskFailures(), empty()); // Calling stop with wait_for_completion assures that we will be in the `STOPPED` state for the transform task - statsResponse = execute(new GetDataFrameTransformStatsRequest(id), - client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); + statsResponse = execute(new GetTransformStatsRequest(id), + client::getTransformStats, client::getTransformStatsAsync); taskState = statsResponse.getTransformsStats().get(0).getState(); - assertThat(taskState, is(DataFrameTransformStats.State.STOPPED)); + assertThat(taskState, is(TransformStats.State.STOPPED)); } @SuppressWarnings("unchecked") @@ -331,12 +331,12 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { createIndex(sourceIndex); indexData(sourceIndex); - DataFrameTransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null); + TransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null); - DataFrameClient client = highLevelClient().dataFrame(); - PreviewDataFrameTransformResponse preview = execute(new PreviewDataFrameTransformRequest(transform), - client::previewDataFrameTransform, - client::previewDataFrameTransformAsync); + TransformClient client = highLevelClient().transform(); + PreviewTransformResponse preview = execute(new PreviewTransformRequest(transform), + client::previewTransform, + client::previewTransformAsync); List> docs = preview.getDocs(); assertThat(docs, hasSize(2)); @@ -355,11 +355,11 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { assertThat(fields.get("avg_rating"), equalTo(Collections.singletonMap("type", "double"))); } - private DataFrameTransformConfig validDataFrameTransformConfig(String id, String source, String destination) { + private TransformConfig validDataFrameTransformConfig(String id, String source, String destination) { return validDataFrameTransformConfigBuilder(id, source, destination).build(); } - private DataFrameTransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) { + private TransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) { GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build(); AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder(); @@ -368,7 +368,7 @@ public 
class DataFrameTransformIT extends ESRestHighLevelClientTestCase { DestConfig destConfig = (destination != null) ? DestConfig.builder().setIndex(destination).build() : null; - return DataFrameTransformConfig.builder() + return TransformConfig.builder() .setId(id) .setSource(SourceConfig.builder().setIndex(source).setQuery(new MatchAllQueryBuilder()).build()) .setDest(destConfig) @@ -389,7 +389,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build(); String id = "test-get-stats"; - DataFrameTransformConfig transform = DataFrameTransformConfig.builder() + TransformConfig transform = TransformConfig.builder() .setId(id) .setSource(SourceConfig.builder().setIndex(sourceIndex).setQuery(new MatchAllQueryBuilder()).build()) .setDest(DestConfig.builder().setIndex("pivot-dest").build()) @@ -397,17 +397,17 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { .setDescription("transform for testing stats") .build(); - DataFrameClient client = highLevelClient().dataFrame(); + TransformClient client = highLevelClient().transform(); putTransform(transform); - GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id), - client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); + GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id), + client::getTransformStats, client::getTransformStatsAsync); assertEquals(1, statsResponse.getTransformsStats().size()); - DataFrameTransformStats stats = statsResponse.getTransformsStats().get(0); - assertEquals(DataFrameTransformStats.State.STOPPED, stats.getState()); + TransformStats stats = statsResponse.getTransformsStats().get(0); + assertEquals(TransformStats.State.STOPPED, stats.getState()); - DataFrameIndexerTransformStats zeroIndexerStats = new DataFrameIndexerTransformStats( + TransformIndexerStats zeroIndexerStats = new TransformIndexerStats( 0L, 0L, 0L, @@ -424,25 +424,25 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { assertEquals(zeroIndexerStats, stats.getIndexerStats()); // start the transform - StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id), - client::startDataFrameTransform, - client::startDataFrameTransformAsync); + StartTransformResponse startTransformResponse = execute(new StartTransformRequest(id), + client::startTransform, + client::startTransformAsync); assertThat(startTransformResponse.isAcknowledged(), is(true)); assertBusy(() -> { - GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id), - client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); - DataFrameTransformStats stateAndStats = response.getTransformsStats().get(0); + GetTransformStatsResponse response = execute(new GetTransformStatsRequest(id), + client::getTransformStats, client::getTransformStatsAsync); + TransformStats stateAndStats = response.getTransformsStats().get(0); assertNotEquals(zeroIndexerStats, stateAndStats.getIndexerStats()); - assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING, - DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED)); + assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING, + TransformStats.State.STOPPING, 
TransformStats.State.STOPPED)); assertThat(stateAndStats.getReason(), is(nullValue())); }); } - void putTransform(DataFrameTransformConfig config) throws IOException { - DataFrameClient client = highLevelClient().dataFrame(); - AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(config), client::putDataFrameTransform, - client::putDataFrameTransformAsync); + void putTransform(TransformConfig config) throws IOException { + TransformClient client = highLevelClient().transform(); + AcknowledgedResponse ack = execute(new PutTransformRequest(config), client::putTransform, + client::putTransformAsync); assertTrue(ack.isAcknowledged()); transformsToClean.add(config.getId()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java index 723594bf498..537ad29efa4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java @@ -28,25 +28,25 @@ import org.elasticsearch.client.core.AcknowledgedResponse; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; -import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.transform.transforms.DataFrameTransformProgress; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import 
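For reference, the renamed create/delete round trip exercised by the integration test above reduces to the following sketch. It uses only names that appear in the diff; the `client` variable (a RestHighLevelClient) and the prebuilt `config` (a TransformConfig) are assumed to exist as in the tests:

    PutTransformRequest putRequest = new PutTransformRequest(config);
    putRequest.setDeferValidation(true); // skip source-index checks, as in the defer-validation test above
    AcknowledgedResponse putAck =
        client.transform().putTransform(putRequest, RequestOptions.DEFAULT);
    assertTrue(putAck.isAcknowledged());

    AcknowledgedResponse deleteAck =
        client.transform().deleteTransform(
            new DeleteTransformRequest(config.getId()), RequestOptions.DEFAULT);
    assertTrue(deleteAck.isAcknowledged());
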
org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PreviewTransformResponse; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; +import org.elasticsearch.client.transform.transforms.TransformIndexerStats; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformProgress; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.NodeAttributes; import org.elasticsearch.client.transform.transforms.QueryConfig; @@ -73,20 +73,20 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTestCase { +public class TransformDocumentationIT extends ESRestHighLevelClientTestCase { private List transformsToClean = new ArrayList<>(); @After public void cleanUpTransforms() throws Exception { for (String transformId : transformsToClean) { - highLevelClient().dataFrame().stopDataFrameTransform( - new StopDataFrameTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT); + highLevelClient().transform().stopTransform( + new StopTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT); } for (String transformId : transformsToClean) { - highLevelClient().dataFrame().deleteDataFrameTransform( - new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + highLevelClient().transform().deleteTransform( + new DeleteTransformRequest(transformId), RequestOptions.DEFAULT); } transformsToClean = new ArrayList<>(); @@ -116,7 +116,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest assertTrue(response.isAcknowledged()); } - public void testPutDataFrameTransform() throws IOException, InterruptedException { + public void testPutTransform() throws IOException, InterruptedException { createIndex("source-index"); RestHighLevelClient client = highLevelClient(); @@ -154,7 +154,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .build(); // end::put-transform-pivot-config // tag::put-transform-config - DataFrameTransformConfig transformConfig = DataFrameTransformConfig + TransformConfig transformConfig = TransformConfig .builder() .setId("reviewer-avg-rating") // <1> .setSource(sourceConfig) // <2> @@ -167,14 +167,14 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest { // tag::put-transform-request - PutDataFrameTransformRequest request = - new PutDataFrameTransformRequest(transformConfig); // <1> + PutTransformRequest request = + new PutTransformRequest(transformConfig); // <1> request.setDeferValidation(false); // <2> // end::put-transform-request // tag::put-transform-execute AcknowledgedResponse 
response = - client.dataFrame().putDataFrameTransform( + client.transform().putTransform( request, RequestOptions.DEFAULT); // end::put-transform-execute transformsToClean.add(request.getConfig().getId()); @@ -182,13 +182,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest assertTrue(response.isAcknowledged()); } { - DataFrameTransformConfig configWithDifferentId = DataFrameTransformConfig.builder() + TransformConfig configWithDifferentId = TransformConfig.builder() .setId("reviewer-avg-rating2") .setSource(transformConfig.getSource()) .setDest(transformConfig.getDestination()) .setPivotConfig(transformConfig.getPivotConfig()) .build(); - PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(configWithDifferentId); + PutTransformRequest request = new PutTransformRequest(configWithDifferentId); // tag::put-transform-execute-listener ActionListener listener = @@ -210,7 +210,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest listener = new LatchedActionListener<>(listener, latch); // tag::put-transform-execute-async - client.dataFrame().putDataFrameTransformAsync( + client.transform().putTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::put-transform-execute-async @@ -231,7 +231,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest AggregationConfig aggConfig = new AggregationConfig(aggBuilder); PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); - DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder() + TransformConfig transformConfig = TransformConfig.builder() .setId("my-transform-to-update") .setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build()) .setDest(DestConfig.builder().setIndex("pivot-dest").build()) @@ -239,11 +239,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .setSyncConfig(new TimeSyncConfig("time-field", TimeValue.timeValueSeconds(120))) .build(); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT); + client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT); transformsToClean.add(transformConfig.getId()); // tag::update-transform-config - DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate + TransformConfigUpdate update = TransformConfigUpdate .builder() .setSource(SourceConfig.builder() .setIndex("source-data") @@ -260,32 +260,32 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest { // tag::update-transform-request - UpdateDataFrameTransformRequest request = - new UpdateDataFrameTransformRequest( + UpdateTransformRequest request = + new UpdateTransformRequest( update, // <1> "my-transform-to-update"); // <2> request.setDeferValidation(false); // <3> // end::update-transform-request // tag::update-transform-execute - UpdateDataFrameTransformResponse response = - client.dataFrame().updateDataFrameTransform(request, + UpdateTransformResponse response = + client.transform().updateTransform(request, RequestOptions.DEFAULT); - DataFrameTransformConfig updatedConfig = + TransformConfig updatedConfig = response.getTransformConfiguration(); // end::update-transform-execute assertThat(updatedConfig.getDescription(), equalTo("This is my updated transform")); } { - UpdateDataFrameTransformRequest request = new 
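Condensed from the tagged documentation snippets above, the full update exchange under the new names looks like this (a sketch; the transform id is the illustrative one used in the test):

    TransformConfigUpdate update = TransformConfigUpdate.builder()
        .setDescription("This is my updated transform")
        .build();
    UpdateTransformRequest request =
        new UpdateTransformRequest(update, "my-transform-to-update");
    UpdateTransformResponse response =
        client.transform().updateTransform(request, RequestOptions.DEFAULT);
    TransformConfig updatedConfig = response.getTransformConfiguration();
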
UpdateDataFrameTransformRequest(update, + UpdateTransformRequest request = new UpdateTransformRequest(update, "my-transform-to-update"); // tag::update-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(UpdateDataFrameTransformResponse response) { + public void onResponse(UpdateTransformResponse response) { // <1> } @@ -301,7 +301,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest listener = new LatchedActionListener<>(listener, latch); // tag::update-transform-execute-async - client.dataFrame().updateDataFrameTransformAsync( + client.transform().updateTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::update-transform-execute-async @@ -322,20 +322,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest AggregationConfig aggConfig = new AggregationConfig(aggBuilder); PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); - DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder() + TransformConfig transformConfig = TransformConfig.builder() .setId("mega-transform") .setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build()) .setDest(DestConfig.builder().setIndex("pivot-dest").build()) .setPivotConfig(pivotConfig) .build(); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT); + client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT); transformsToClean.add(transformConfig.getId()); { // tag::start-transform-request - StartDataFrameTransformRequest request = - new StartDataFrameTransformRequest("mega-transform"); // <1> + StartTransformRequest request = + new StartTransformRequest("mega-transform"); // <1> // end::start-transform-request // tag::start-transform-request-options @@ -343,8 +343,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest // end::start-transform-request-options // tag::start-transform-execute - StartDataFrameTransformResponse response = - client.dataFrame().startDataFrameTransform( + StartTransformResponse response = + client.transform().startTransform( request, RequestOptions.DEFAULT); // end::start-transform-execute @@ -352,8 +352,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::stop-transform-request - StopDataFrameTransformRequest request = - new StopDataFrameTransformRequest("mega-transform"); // <1> + StopTransformRequest request = + new StopTransformRequest("mega-transform"); // <1> // end::stop-transform-request // tag::stop-transform-request-options @@ -363,8 +363,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest // end::stop-transform-request-options // tag::stop-transform-execute - StopDataFrameTransformResponse response = - client.dataFrame().stopDataFrameTransform( + StopTransformResponse response = + client.transform().stopTransform( request, RequestOptions.DEFAULT); // end::stop-transform-execute @@ -372,11 +372,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::start-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override public void onResponse( - StartDataFrameTransformResponse response) 
{ + StartTransformResponse response) { // <1> } @@ -391,9 +391,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("mega-transform"); + StartTransformRequest request = new StartTransformRequest("mega-transform"); // tag::start-transform-execute-async - client.dataFrame().startDataFrameTransformAsync( + client.transform().startTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::start-transform-execute-async @@ -401,11 +401,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::stop-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override public void onResponse( - StopDataFrameTransformResponse response) { + StopTransformResponse response) { // <1> } @@ -420,9 +420,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("mega-transform"); + StopTransformRequest request = new StopTransformRequest("mega-transform"); // tag::stop-transform-execute-async - client.dataFrame().stopDataFrameTransformAsync( + client.transform().stopTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::stop-transform-execute-async @@ -442,7 +442,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest AggregationConfig aggConfig = new AggregationConfig(aggBuilder); PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); - DataFrameTransformConfig transformConfig1 = DataFrameTransformConfig.builder() + TransformConfig transformConfig1 = TransformConfig.builder() .setId("mega-transform") .setSource(SourceConfig.builder() .setIndex("source-data") @@ -451,7 +451,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .setDest(DestConfig.builder().setIndex("pivot-dest").build()) .setPivotConfig(pivotConfig) .build(); - DataFrameTransformConfig transformConfig2 = DataFrameTransformConfig.builder() + TransformConfig transformConfig2 = TransformConfig.builder() .setId("mega-transform2") .setSource(SourceConfig.builder() .setIndex("source-data") @@ -461,20 +461,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .setPivotConfig(pivotConfig) .build(); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig1), RequestOptions.DEFAULT); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig2), RequestOptions.DEFAULT); + client.transform().putTransform(new PutTransformRequest(transformConfig1), RequestOptions.DEFAULT); + client.transform().putTransform(new PutTransformRequest(transformConfig2), RequestOptions.DEFAULT); { // tag::delete-transform-request - DeleteDataFrameTransformRequest request = - new DeleteDataFrameTransformRequest("mega-transform"); // <1> + DeleteTransformRequest request = + new DeleteTransformRequest("mega-transform"); // <1> request.setForce(false); // <2> // end::delete-transform-request // tag::delete-transform-execute AcknowledgedResponse response = - client.dataFrame() - 
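The synchronous start/stop pair shown in the tagged snippets reduces to the sketch below. The id is the illustrative one from the test; `Boolean.TRUE` and `null` in the stop request mean "wait for completion" with no explicit timeout, matching the integration test above:

    StartTransformResponse started = client.transform()
        .startTransform(new StartTransformRequest("mega-transform"), RequestOptions.DEFAULT);
    assertTrue(started.isAcknowledged());

    StopTransformRequest stopRequest =
        new StopTransformRequest("mega-transform", Boolean.TRUE, null);
    StopTransformResponse stopped =
        client.transform().stopTransform(stopRequest, RequestOptions.DEFAULT);
    assertTrue(stopped.isAcknowledged());
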
.deleteDataFrameTransform(request, RequestOptions.DEFAULT); + client.transform() + .deleteTransform(request, RequestOptions.DEFAULT); // end::delete-transform-execute assertTrue(response.isAcknowledged()); @@ -499,10 +499,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - DeleteDataFrameTransformRequest request = new DeleteDataFrameTransformRequest("mega-transform2"); + DeleteTransformRequest request = new DeleteTransformRequest("mega-transform2"); // tag::delete-transform-execute-async - client.dataFrame().deleteDataFrameTransformAsync( + client.transform().deleteTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::delete-transform-execute-async @@ -524,23 +524,23 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); // tag::preview-transform-request - DataFrameTransformConfig transformConfig = - DataFrameTransformConfig.forPreview( + TransformConfig transformConfig = + TransformConfig.forPreview( SourceConfig.builder() .setIndex("source-data") .setQueryConfig(queryConfig) .build(), // <1> pivotConfig); // <2> - PreviewDataFrameTransformRequest request = - new PreviewDataFrameTransformRequest(transformConfig); // <3> + PreviewTransformRequest request = + new PreviewTransformRequest(transformConfig); // <3> // end::preview-transform-request { // tag::preview-transform-execute - PreviewDataFrameTransformResponse response = - client.dataFrame() - .previewDataFrameTransform(request, RequestOptions.DEFAULT); + PreviewTransformResponse response = + client.transform() + .previewTransform(request, RequestOptions.DEFAULT); // end::preview-transform-execute assertNotNull(response.getDocs()); @@ -548,10 +548,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::preview-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(PreviewDataFrameTransformResponse response) { + public void onResponse(PreviewTransformResponse response) { // <1> } @@ -567,7 +567,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest listener = new LatchedActionListener<>(listener, latch); // tag::preview-transform-execute-async - client.dataFrame().previewDataFrameTransformAsync( + client.transform().previewTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::preview-transform-execute-async @@ -588,7 +588,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); String id = "statisitcal-transform"; - DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder() + TransformConfig transformConfig = TransformConfig.builder() .setId(id) .setSource(SourceConfig.builder() .setIndex("source-data") @@ -597,12 +597,12 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .setDest(DestConfig.builder().setIndex("pivot-dest").build()) .setPivotConfig(pivotConfig) .build(); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT); + client.transform().putTransform(new 
PutTransformRequest(transformConfig), RequestOptions.DEFAULT); transformsToClean.add(id); // tag::get-transform-stats-request - GetDataFrameTransformStatsRequest request = - new GetDataFrameTransformStatsRequest(id); // <1> + GetTransformStatsRequest request = + new GetTransformStatsRequest(id); // <1> // end::get-transform-stats-request // tag::get-transform-stats-request-options @@ -612,38 +612,38 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest { // tag::get-transform-stats-execute - GetDataFrameTransformStatsResponse response = - client.dataFrame() - .getDataFrameTransformStats(request, RequestOptions.DEFAULT); + GetTransformStatsResponse response = + client.transform() + .getTransformStats(request, RequestOptions.DEFAULT); // end::get-transform-stats-execute assertThat(response.getTransformsStats(), hasSize(1)); // tag::get-transform-stats-response - DataFrameTransformStats stats = + TransformStats stats = response.getTransformsStats().get(0); // <1> - DataFrameTransformStats.State state = + TransformStats.State state = stats.getState(); // <2> - DataFrameIndexerTransformStats indexerStats = + TransformIndexerStats indexerStats = stats.getIndexerStats(); // <3> - DataFrameTransformProgress progress = + TransformProgress progress = stats.getCheckpointingInfo() .getNext().getCheckpointProgress(); // <4> NodeAttributes node = stats.getNode(); // <5> // end::get-transform-stats-response - assertEquals(DataFrameTransformStats.State.STOPPED, state); + assertEquals(TransformStats.State.STOPPED, state); assertNotNull(indexerStats); assertNull(progress); } { // tag::get-transform-stats-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override public void onResponse( - GetDataFrameTransformStatsResponse response) { + GetTransformStatsResponse response) { // <1> } @@ -659,7 +659,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest listener = new LatchedActionListener<>(listener, latch); // tag::get-transform-stats-execute-async - client.dataFrame().getDataFrameTransformStatsAsync( + client.transform().getTransformStatsAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::get-transform-stats-execute-async @@ -679,7 +679,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build(); - DataFrameTransformConfig putTransformConfig = DataFrameTransformConfig.builder() + TransformConfig putTransformConfig = TransformConfig.builder() .setId("mega-transform") .setSource(SourceConfig.builder() .setIndex("source-data") @@ -690,13 +690,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest .build(); RestHighLevelClient client = highLevelClient(); - client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT); + client.transform().putTransform(new PutTransformRequest(putTransformConfig), RequestOptions.DEFAULT); transformsToClean.add(putTransformConfig.getId()); { // tag::get-transform-request - GetDataFrameTransformRequest request = - new GetDataFrameTransformRequest("mega-transform"); // <1> + GetTransformRequest request = + new GetTransformRequest("mega-transform"); // <1> // end::get-transform-request // tag::get-transform-request-options @@ -705,13 +705,13 @@ public class DataFrameTransformDocumentationIT extends 
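Pulling the get-transform-stats snippets together, the renamed stats accessors chain as below (a sketch; the id is illustrative, and `getCheckpointProgress()` is null for a transform that has already stopped, as the assertions above show):

    GetTransformStatsResponse statsResponse = client.transform()
        .getTransformStats(new GetTransformStatsRequest("mega-transform"), RequestOptions.DEFAULT);
    TransformStats stats = statsResponse.getTransformsStats().get(0);
    TransformStats.State state = stats.getState();
    TransformIndexerStats indexerStats = stats.getIndexerStats();
    TransformProgress progress =
        stats.getCheckpointingInfo().getNext().getCheckpointProgress();
    NodeAttributes node = stats.getNode();
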
ESRestHighLevelClientTest // end::get-transform-request-options // tag::get-transform-execute - GetDataFrameTransformResponse response = - client.dataFrame() - .getDataFrameTransform(request, RequestOptions.DEFAULT); + GetTransformResponse response = + client.transform() + .getTransform(request, RequestOptions.DEFAULT); // end::get-transform-execute // tag::get-transform-response - List transformConfigs = + List transformConfigs = response.getTransformConfigurations(); // end::get-transform-response @@ -719,10 +719,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::get-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(GetDataFrameTransformResponse response) { + public void onResponse(GetTransformResponse response) { // <1> } @@ -737,10 +737,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("mega-transform"); + GetTransformRequest request = new GetTransformRequest("mega-transform"); // tag::get-transform-execute-async - client.dataFrame().getDataFrameTransformAsync( + client.transform().getTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::get-transform-execute-async diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java index dd20d513970..bf70c426936 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java @@ -23,11 +23,11 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class DeleteDataFrameTransformRequestTests extends ESTestCase { +public class DeleteTransformRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new DeleteDataFrameTransformRequest("valid-id").validate().isPresent()); - assertThat(new DeleteDataFrameTransformRequest(null).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new DeleteTransformRequest("valid-id").validate().isPresent()); + assertThat(new DeleteTransformRequest(null).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java index 044d880f8e8..d9d9702219a 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java @@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class GetDataFrameTransformRequestTests extends ESTestCase { +public class GetTransformRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new GetDataFrameTransformRequest("valid-id").validate().isPresent()); - assertThat(new GetDataFrameTransformRequest(new String[0]).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new GetTransformRequest("valid-id").validate().isPresent()); + assertThat(new GetTransformRequest(new String[0]).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java similarity index 71% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java index 90c2c286077..3959114cd95 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,32 +35,32 @@ import java.util.List; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class GetDataFrameTransformResponseTests extends ESTestCase { +public class GetTransformResponseTests extends ESTestCase { public void testXContentParser() throws IOException { xContentTester(this::createParser, - GetDataFrameTransformResponseTests::createTestInstance, - GetDataFrameTransformResponseTests::toXContent, - GetDataFrameTransformResponse::fromXContent) + GetTransformResponseTests::createTestInstance, + GetTransformResponseTests::toXContent, + GetTransformResponse::fromXContent) .supportsUnknownFields(false) .test(); } - private static GetDataFrameTransformResponse createTestInstance() { + private static GetTransformResponse createTestInstance() { int numTransforms = randomIntBetween(0, 3); - List transforms = new ArrayList<>(); + List transforms = new ArrayList<>(); for (int i=0; i invalidIds = Arrays.asList(generateRandomStringArray(5, 6, false, false)); - invalidTransforms = new GetDataFrameTransformResponse.InvalidTransforms(invalidIds); + invalidTransforms = new GetTransformResponse.InvalidTransforms(invalidIds); } - return new GetDataFrameTransformResponse(transforms, transforms.size() + 10, invalidTransforms); + return new 
GetTransformResponse(transforms, transforms.size() + 10, invalidTransforms); } - private static void toXContent(GetDataFrameTransformResponse response, XContentBuilder builder) throws IOException { + private static void toXContent(GetTransformResponse response, XContentBuilder builder) throws IOException { builder.startObject(); { builder.field("count", response.getCount()); @@ -79,7 +79,7 @@ public class GetDataFrameTransformResponseTests extends ESTestCase { protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java index 68fd9ff8853..cb99d5d6081 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java @@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class GetDataFrameTransformStatsRequestTests extends ESTestCase { +public class GetTransformStatsRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new GetDataFrameTransformStatsRequest("valid-id").validate().isPresent()); - assertThat(new GetDataFrameTransformStatsRequest(null).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new GetTransformStatsRequest("valid-id").validate().isPresent()); + assertThat(new GetTransformStatsRequest(null).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java similarity index 70% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java index d903215284a..d87d0c861ba 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.client.transform; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStatsTests; +import 
org.elasticsearch.client.transform.transforms.TransformStats; +import org.elasticsearch.client.transform.transforms.TransformStatsTests; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; @@ -32,25 +32,25 @@ import java.util.List; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class GetDataFrameTransformStatsResponseTests extends ESTestCase { +public class GetTransformStatsResponseTests extends ESTestCase { public void testXContentParser() throws IOException { xContentTester(this::createParser, - GetDataFrameTransformStatsResponseTests::createTestInstance, - GetDataFrameTransformStatsResponseTests::toXContent, - GetDataFrameTransformStatsResponse::fromXContent) - .assertEqualsConsumer(GetDataFrameTransformStatsResponseTests::assertEqualInstances) + GetTransformStatsResponseTests::createTestInstance, + GetTransformStatsResponseTests::toXContent, + GetTransformStatsResponse::fromXContent) + .assertEqualsConsumer(GetTransformStatsResponseTests::assertEqualInstances) .assertToXContentEquivalence(false) .supportsUnknownFields(true) .randomFieldsExcludeFilter(path -> path.isEmpty() == false) .test(); } - private static GetDataFrameTransformStatsResponse createTestInstance() { + private static GetTransformStatsResponse createTestInstance() { int count = randomIntBetween(1, 3); - List stats = new ArrayList<>(); + List stats = new ArrayList<>(); for (int i=0; i taskFailures = null; @@ -66,19 +66,19 @@ public class GetDataFrameTransformStatsResponseTests extends ESTestCase { nodeFailures = new ArrayList<>(); int numNodeFailures = randomIntBetween(1, 4); for (int i=0; i { +public class PreviewTransformRequestTests extends AbstractXContentTestCase { @Override - protected PreviewDataFrameTransformRequest createTestInstance() { - return new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + protected PreviewTransformRequest createTestInstance() { + return new PreviewTransformRequest(TransformConfigTests.randomTransformConfig()); } @Override - protected PreviewDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new PreviewDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser)); + protected PreviewTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new PreviewTransformRequest(TransformConfig.fromXContent(parser)); } @Override @@ -57,27 +57,27 @@ public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestC protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } public void testValidate() { - assertFalse(new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()) + assertFalse(new PreviewTransformRequest(TransformConfigTests.randomTransformConfig()) .validate().isPresent()); - assertThat(new PreviewDataFrameTransformRequest(null).validate().get().getMessage(), - containsString("preview requires a non-null data frame config")); + assertThat(new PreviewTransformRequest(null).validate().get().getMessage(), + containsString("preview requires a non-null 
transform config")); // null id and destination is valid - DataFrameTransformConfig config = DataFrameTransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig()); + TransformConfig config = TransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig()); - assertFalse(new PreviewDataFrameTransformRequest(config).validate().isPresent()); + assertFalse(new PreviewTransformRequest(config).validate().isPresent()); // null source is not valid - config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); + config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); - Optional error = new PreviewDataFrameTransformRequest(config).validate(); + Optional error = new PreviewTransformRequest(config).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null")); + assertThat(error.get().getMessage(), containsString("transform source cannot be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java similarity index 86% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java index c8f852c68c9..1ca3a49b5ec 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java @@ -31,19 +31,19 @@ import java.util.Map; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class PreviewDataFrameTransformResponseTests extends ESTestCase { +public class PreviewTransformResponseTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, this::createTestInstance, this::toXContent, - PreviewDataFrameTransformResponse::fromXContent) + PreviewTransformResponse::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(path -> path.isEmpty() == false) .test(); } - private PreviewDataFrameTransformResponse createTestInstance() { + private PreviewTransformResponse createTestInstance() { int numDocs = randomIntBetween(5, 10); List> docs = new ArrayList<>(numDocs); for (int i=0; i doc : response.getDocs()) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java similarity index 60% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java index 19af4bfe5a0..146ca990c7e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.ValidationException; -import 
org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -36,32 +36,32 @@ import java.util.Optional; import static org.hamcrest.Matchers.containsString; -public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase { +public class PutTransformRequestTests extends AbstractXContentTestCase { public void testValidate() { assertFalse(createTestInstance().validate().isPresent()); - DataFrameTransformConfig config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); + TransformConfig config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); - Optional error = new PutDataFrameTransformRequest(config).validate(); + Optional error = new PutTransformRequest(config).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null")); - assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null")); - assertThat(error.get().getMessage(), containsString("data frame transform destination cannot be null")); + assertThat(error.get().getMessage(), containsString("transform id cannot be null")); + assertThat(error.get().getMessage(), containsString("transform source cannot be null")); + assertThat(error.get().getMessage(), containsString("transform destination cannot be null")); - error = new PutDataFrameTransformRequest(null).validate(); + error = new PutTransformRequest(null).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config")); + assertThat(error.get().getMessage(), containsString("put requires a non-null transform config")); } @Override - protected PutDataFrameTransformRequest createTestInstance() { - return new PutDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + protected PutTransformRequest createTestInstance() { + return new PutTransformRequest(TransformConfigTests.randomTransformConfig()); } @Override - protected PutDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new PutDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser)); + protected PutTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new PutTransformRequest(TransformConfig.fromXContent(parser)); } @Override @@ -73,7 +73,7 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase< protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java index f4950fc057a..1ba5f610912 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java @@ -28,14 +28,14 @@ import static org.hamcrest.Matchers.containsString; public class StartDataFrameTransformRequestTests extends ESTestCase { public void testValidate_givenNullId() { - StartDataFrameTransformRequest request = new StartDataFrameTransformRequest(null, null); + StartTransformRequest request = new StartTransformRequest(null, null); Optional validate = request.validate(); assertTrue(validate.isPresent()); - assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null")); + assertThat(validate.get().getMessage(), containsString("transform id must not be null")); } public void testValidate_givenValid() { - StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("foo", null); + StartTransformRequest request = new StartTransformRequest("foo", null); Optional validate = request.validate(); assertFalse(validate.isPresent()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java similarity index 82% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java index 5b28983c086..155da314582 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java @@ -26,16 +26,16 @@ import java.util.Optional; import static org.hamcrest.Matchers.containsString; -public class StopDataFrameTransformRequestTests extends ESTestCase { +public class StopTransformRequestTests extends ESTestCase { public void testValidate_givenNullId() { - StopDataFrameTransformRequest request = new StopDataFrameTransformRequest(null); + StopTransformRequest request = new StopTransformRequest(null); Optional validate = request.validate(); assertTrue(validate.isPresent()); - assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null")); + assertThat(validate.get().getMessage(), containsString("transform id must not be null")); } public void testValidate_givenValid() { - StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("foo"); + StopTransformRequest request = new StopTransformRequest("foo"); Optional validate = request.validate(); assertFalse(validate.isPresent()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java index a2b20d4516d..9eccbdc9ab9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,17 +38,17 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase { xContentTester(this::createParser, UpdateDataFrameTransformResponseTests::createTestInstance, UpdateDataFrameTransformResponseTests::toXContent, - UpdateDataFrameTransformResponse::fromXContent) + UpdateTransformResponse::fromXContent) .assertToXContentEquivalence(false) .supportsUnknownFields(false) .test(); } - private static UpdateDataFrameTransformResponse createTestInstance() { - return new UpdateDataFrameTransformResponse(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + private static UpdateTransformResponse createTestInstance() { + return new UpdateTransformResponse(TransformConfigTests.randomTransformConfig()); } - private static void toXContent(UpdateDataFrameTransformResponse response, XContentBuilder builder) throws IOException { + private static void toXContent(UpdateTransformResponse response, XContentBuilder builder) throws IOException { response.getTransformConfiguration().toXContent(builder, null); } @@ -56,7 +56,7 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase { protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java similarity index 63% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java index 7944b486c9e..77a468624a2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,34 +32,34 @@ import java.util.Collections; import java.util.List; import java.util.Optional; -import static org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate; +import static org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests.randomTransformConfigUpdate; import static org.hamcrest.Matchers.containsString; 
-public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCase { +public class UpdateTransformRequestTests extends AbstractXContentTestCase { public void testValidate() { assertFalse(createTestInstance().validate().isPresent()); - DataFrameTransformConfigUpdate config = randomDataFrameTransformConfigUpdate(); + TransformConfigUpdate config = randomTransformConfigUpdate(); - Optional error = new UpdateDataFrameTransformRequest(config, null).validate(); + Optional error = new UpdateTransformRequest(config, null).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null")); + assertThat(error.get().getMessage(), containsString("transform id cannot be null")); - error = new UpdateDataFrameTransformRequest(null, "123").validate(); + error = new UpdateTransformRequest(null, "123").validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config")); + assertThat(error.get().getMessage(), containsString("put requires a non-null transform config")); } private final String transformId = randomAlphaOfLength(10); @Override - protected UpdateDataFrameTransformRequest createTestInstance() { - return new UpdateDataFrameTransformRequest(randomDataFrameTransformConfigUpdate(), transformId); + protected UpdateTransformRequest createTestInstance() { + return new UpdateTransformRequest(randomTransformConfigUpdate(), transformId); } @Override - protected UpdateDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate.fromXContent(parser), transformId); + protected UpdateTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new UpdateTransformRequest(TransformConfigUpdate.fromXContent(parser), transformId); } @Override @@ -71,7 +71,7 @@ public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCa protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java similarity index 51% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java index d03651170f7..0095302919e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java @@ -26,38 +26,38 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformCheckpointStatsTests extends ESTestCase { +public class TransformCheckpointStatsTests 
extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformCheckpointStatsTests::randomDataFrameTransformCheckpointStats, - DataFrameTransformCheckpointStatsTests::toXContent, - DataFrameTransformCheckpointStats::fromXContent) + TransformCheckpointStatsTests::randomTransformCheckpointStats, + TransformCheckpointStatsTests::toXContent, + TransformCheckpointStats::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.startsWith("position")) .test(); } - public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() { - return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000), - randomBoolean() ? null : DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), - randomBoolean() ? null : DataFrameTransformProgressTests.randomInstance(), + public static TransformCheckpointStats randomTransformCheckpointStats() { + return new TransformCheckpointStats(randomLongBetween(1, 1_000_000), + randomBoolean() ? null : TransformIndexerPositionTests.randomTransformIndexerPosition(), + randomBoolean() ? null : TransformProgressTests.randomInstance(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); } - public static void toXContent(DataFrameTransformCheckpointStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformCheckpointStats stats, XContentBuilder builder) throws IOException { builder.startObject(); - builder.field(DataFrameTransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint()); + builder.field(TransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint()); if (stats.getPosition() != null) { - builder.field(DataFrameTransformCheckpointStats.POSITION.getPreferredName()); - DataFrameIndexerPositionTests.toXContent(stats.getPosition(), builder); + builder.field(TransformCheckpointStats.POSITION.getPreferredName()); + TransformIndexerPositionTests.toXContent(stats.getPosition(), builder); } if (stats.getCheckpointProgress() != null) { - builder.field(DataFrameTransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName()); - DataFrameTransformProgressTests.toXContent(stats.getCheckpointProgress(), builder); + builder.field(TransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName()); + TransformProgressTests.toXContent(stats.getCheckpointProgress(), builder); } - builder.field(DataFrameTransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis()); - builder.field(DataFrameTransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis()); + builder.field(TransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis()); + builder.field(TransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis()); builder.endObject(); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java similarity index 54% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java index 2ec042a4a6a..326ecd73a15 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java @@ -27,38 +27,38 @@ import java.time.Instant; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformCheckpointingInfoTests extends ESTestCase { +public class TransformCheckpointingInfoTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformCheckpointingInfoTests::randomDataFrameTransformCheckpointingInfo, - DataFrameTransformCheckpointingInfoTests::toXContent, - DataFrameTransformCheckpointingInfo::fromXContent) + TransformCheckpointingInfoTests::randomTransformCheckpointingInfo, + TransformCheckpointingInfoTests::toXContent, + TransformCheckpointingInfo::fromXContent) .supportsUnknownFields(false) .test(); } - public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() { - return new DataFrameTransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), + public static TransformCheckpointingInfo randomTransformCheckpointingInfo() { + return new TransformCheckpointingInfo( + TransformCheckpointStatsTests.randomTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), randomLongBetween(0, 10000), randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong())); } - public static void toXContent(DataFrameTransformCheckpointingInfo info, XContentBuilder builder) throws IOException { + public static void toXContent(TransformCheckpointingInfo info, XContentBuilder builder) throws IOException { builder.startObject(); if (info.getLast().getTimestampMillis() > 0) { - builder.field(DataFrameTransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName()); - DataFrameTransformCheckpointStatsTests.toXContent(info.getLast(), builder); + builder.field(TransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName()); + TransformCheckpointStatsTests.toXContent(info.getLast(), builder); } if (info.getNext().getTimestampMillis() > 0) { - builder.field(DataFrameTransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName()); - DataFrameTransformCheckpointStatsTests.toXContent(info.getNext(), builder); + builder.field(TransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName()); + TransformCheckpointStatsTests.toXContent(info.getNext(), builder); } - builder.field(DataFrameTransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind()); + builder.field(TransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind()); if (info.getChangesLastDetectedAt() != null) { - builder.field(DataFrameTransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt()); + builder.field(TransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt()); } builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java similarity index 81% rename from 
client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java index a70a580c620..1444f817c8c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transform.transforms; import org.elasticsearch.Version; -import org.elasticsearch.client.transform.DataFrameNamedXContentProvider; +import org.elasticsearch.client.transform.TransformNamedXContentProvider; import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -38,10 +38,10 @@ import java.util.function.Predicate; import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig; import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig; -public class DataFrameTransformConfigTests extends AbstractXContentTestCase { +public class TransformConfigTests extends AbstractXContentTestCase { - public static DataFrameTransformConfig randomDataFrameTransformConfig() { - return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), + public static TransformConfig randomTransformConfig() { + return new TransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(), randomDestConfig(), randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1000, 1000000)), @@ -57,13 +57,13 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java similarity index 78% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java index 7d5cca0e60b..f992a272b7d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transform.transforms; -import org.elasticsearch.client.transform.DataFrameNamedXContentProvider; +import org.elasticsearch.client.transform.TransformNamedXContentProvider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -34,10 +34,10 @@ import java.util.List; import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig; import static 
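For orientation while reading the renamed `TransformConfig` tests: the builder seen in this patch (`TransformConfig.builder()`, `setPivotConfig(...)`) is the same entry point callers use. A sketch of a hand-assembled config, assuming the pivot-side builders (`GroupConfig`, `TermsGroupSource`, `PivotConfig`, `AggregationConfig`) keep their pre-rename shape; the index and field names are made up:

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.pivot.AggregationConfig;
import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;

public class BuildTransformConfig {
    public static TransformConfig reviewsByUser() {
        // Bucket documents by a terms group source, then average a numeric field per bucket.
        GroupConfig groups = GroupConfig.builder()
            .groupBy("user", TermsGroupSource.builder().setField("user_id").build())
            .build();
        AggregatorFactories.Builder aggs = AggregatorFactories.builder()
            .addAggregator(AggregationBuilders.avg("avg_rating").field("rating"));

        return TransformConfig.builder()
            .setId("reviews-by-user")                                           // "transform id cannot be null"
            .setSource(SourceConfig.builder().setIndex("reviews").build())      // "... source cannot be null"
            .setDest(DestConfig.builder().setIndex("reviews-by-user").build())  // "... destination cannot be null"
            .setPivotConfig(PivotConfig.builder()
                .setGroups(groups)
                .setAggregationConfig(new AggregationConfig(aggs))
                .build())
            .build();
    }
}
--------------------------------------------------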
org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig; -public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCase { +public class TransformConfigUpdateTests extends AbstractXContentTestCase { - public static DataFrameTransformConfigUpdate randomDataFrameTransformConfigUpdate() { - return new DataFrameTransformConfigUpdate( + public static TransformConfigUpdate randomTransformConfigUpdate() { + return new TransformConfigUpdate( randomBoolean() ? null : randomSourceConfig(), randomBoolean() ? null : randomDestConfig(), randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), @@ -50,8 +50,8 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas } @Override - protected DataFrameTransformConfigUpdate doParseInstance(XContentParser parser) throws IOException { - return DataFrameTransformConfigUpdate.fromXContent(parser); + protected TransformConfigUpdate doParseInstance(XContentParser parser) throws IOException { + return TransformConfigUpdate.fromXContent(parser); } @Override @@ -60,15 +60,15 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas } @Override - protected DataFrameTransformConfigUpdate createTestInstance() { - return randomDataFrameTransformConfigUpdate(); + protected TransformConfigUpdate createTestInstance() { + return randomTransformConfigUpdate(); } @Override protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java similarity index 85% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java index e4d1c505532..911966bbfc2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java @@ -28,24 +28,24 @@ import java.util.Map; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameIndexerPositionTests extends ESTestCase { +public class TransformIndexerPositionTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameIndexerPositionTests::randomDataFrameIndexerPosition, - DataFrameIndexerPositionTests::toXContent, - DataFrameIndexerPosition::fromXContent) + TransformIndexerPositionTests::randomTransformIndexerPosition, + TransformIndexerPositionTests::toXContent, + TransformIndexerPosition::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.equals("indexer_position") || field.equals("bucket_position")) .test(); } - public static DataFrameIndexerPosition randomDataFrameIndexerPosition() { - 
return new DataFrameIndexerPosition(randomPositionMap(), randomPositionMap()); + public static TransformIndexerPosition randomTransformIndexerPosition() { + return new TransformIndexerPosition(randomPositionMap(), randomPositionMap()); } - public static void toXContent(DataFrameIndexerPosition position, XContentBuilder builder) throws IOException { + public static void toXContent(TransformIndexerPosition position, XContentBuilder builder) throws IOException { builder.startObject(); if (position.getIndexerPosition() != null) { builder.field("indexer_position", position.getIndexerPosition()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java similarity index 76% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java index f6174815aa4..018cab89b0f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java @@ -27,20 +27,20 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameIndexerTransformStatsTests extends ESTestCase { +public class TransformIndexerStatsTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester( this::createParser, - DataFrameIndexerTransformStatsTests::randomStats, - DataFrameIndexerTransformStatsTests::toXContent, - DataFrameIndexerTransformStats::fromXContent) + TransformIndexerStatsTests::randomStats, + TransformIndexerStatsTests::toXContent, + TransformIndexerStats::fromXContent) .supportsUnknownFields(true) .test(); } - public static DataFrameIndexerTransformStats randomStats() { - return new DataFrameIndexerTransformStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + public static TransformIndexerStats randomStats() { + return new TransformIndexerStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomBoolean() ? null : randomDouble(), @@ -48,7 +48,7 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase { randomBoolean() ? 
null : randomDouble()); } - public static void toXContent(DataFrameIndexerTransformStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformIndexerStats stats, XContentBuilder builder) throws IOException { builder.startObject(); builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages()); builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments()); @@ -60,11 +60,11 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase { builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime()); builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal()); builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(), stats.getExpAvgCheckpointDurationMs()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), stats.getExpAvgDocumentsIndexed()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(), stats.getExpAvgDocumentsProcessed()); builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java similarity index 62% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java index faf6805ac27..f7b46b89298 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java @@ -26,19 +26,19 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformProgressTests extends ESTestCase { +public class TransformProgressTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformProgressTests::randomInstance, - DataFrameTransformProgressTests::toXContent, - DataFrameTransformProgress::fromXContent) + TransformProgressTests::randomInstance, + TransformProgressTests::toXContent, + TransformProgress::fromXContent) .supportsUnknownFields(true) .test(); } - public static DataFrameTransformProgress randomInstance() { - return new DataFrameTransformProgress( + public static TransformProgress randomInstance() { + return new TransformProgress( randomBoolean() ? null : randomNonNegativeLong(), randomBoolean() ? null : randomNonNegativeLong(), randomBoolean() ? null : randomDouble(), @@ -46,19 +46,19 @@ public class DataFrameTransformProgressTests extends ESTestCase { randomBoolean() ? 
null : randomNonNegativeLong()); } - public static void toXContent(DataFrameTransformProgress progress, XContentBuilder builder) throws IOException { + public static void toXContent(TransformProgress progress, XContentBuilder builder) throws IOException { builder.startObject(); if (progress.getTotalDocs() != null) { - builder.field(DataFrameTransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs()); + builder.field(TransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs()); } if (progress.getPercentComplete() != null) { - builder.field(DataFrameTransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete()); + builder.field(TransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete()); } if (progress.getRemainingDocs() != null) { - builder.field(DataFrameTransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs()); + builder.field(TransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs()); } - builder.field(DataFrameTransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed()); - builder.field(DataFrameTransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed()); + builder.field(TransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed()); + builder.field(TransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed()); builder.endObject(); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java similarity index 57% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java index 4b3658f6ea1..873d4aef1d3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java @@ -27,46 +27,46 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformStatsTests extends ESTestCase { +public class TransformStatsTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformStatsTests::randomInstance, - DataFrameTransformStatsTests::toXContent, - DataFrameTransformStats::fromXContent) + TransformStatsTests::randomInstance, + TransformStatsTests::toXContent, + TransformStats::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.equals("node.attributes") || field.contains("position")) .test(); } - public static DataFrameTransformStats randomInstance() { - return new DataFrameTransformStats(randomAlphaOfLength(10), - randomBoolean() ? null : randomFrom(DataFrameTransformStats.State.values()), + public static TransformStats randomInstance() { + return new TransformStats(randomAlphaOfLength(10), + randomBoolean() ? null : randomFrom(TransformStats.State.values()), randomBoolean() ? null : randomAlphaOfLength(100), randomBoolean() ? null : NodeAttributesTests.createRandom(), - DataFrameIndexerTransformStatsTests.randomStats(), - randomBoolean() ? 
null : DataFrameTransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo()); + TransformIndexerStatsTests.randomStats(), + randomBoolean() ? null : TransformCheckpointingInfoTests.randomTransformCheckpointingInfo()); } - public static void toXContent(DataFrameTransformStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformStats stats, XContentBuilder builder) throws IOException { builder.startObject(); - builder.field(DataFrameTransformStats.ID.getPreferredName(), stats.getId()); + builder.field(TransformStats.ID.getPreferredName(), stats.getId()); if (stats.getState() != null) { - builder.field(DataFrameTransformStats.STATE_FIELD.getPreferredName(), + builder.field(TransformStats.STATE_FIELD.getPreferredName(), stats.getState().value()); } if (stats.getReason() != null) { - builder.field(DataFrameTransformStats.REASON_FIELD.getPreferredName(), stats.getReason()); + builder.field(TransformStats.REASON_FIELD.getPreferredName(), stats.getReason()); } if (stats.getNode() != null) { - builder.field(DataFrameTransformStats.NODE_FIELD.getPreferredName()); + builder.field(TransformStats.NODE_FIELD.getPreferredName()); stats.getNode().toXContent(builder, ToXContent.EMPTY_PARAMS); } - builder.field(DataFrameTransformStats.STATS_FIELD.getPreferredName()); - DataFrameIndexerTransformStatsTests.toXContent(stats.getIndexerStats(), builder); + builder.field(TransformStats.STATS_FIELD.getPreferredName()); + TransformIndexerStatsTests.toXContent(stats.getIndexerStats(), builder); if (stats.getCheckpointingInfo() != null) { - builder.field(DataFrameTransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName()); - DataFrameTransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder); + builder.field(TransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName()); + TransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder); } builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java similarity index 75% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java index 6299431893c..e9ca2b019fd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java @@ -28,40 +28,40 @@ import java.io.IOException; import static org.hamcrest.Matchers.equalTo; -public class DataFrameTransformCheckpointStatsTests extends AbstractResponseTestCase< +public class TransformCheckpointStatsTests extends AbstractResponseTestCase< TransformCheckpointStats, - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats> { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats> { public static TransformCheckpointStats fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats instance) { return new 
TransformCheckpointStats(instance.getCheckpoint(), - DataFrameIndexerPositionTests.fromHlrc(instance.getPosition()), - DataFrameTransformProgressTests.fromHlrc(instance.getCheckpointProgress()), + TransformIndexerPositionTests.fromHlrc(instance.getPosition()), + TransformProgressTests.fromHlrc(instance.getCheckpointProgress()), instance.getTimestampMillis(), instance.getTimeUpperBoundMillis()); } - public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() { + public static TransformCheckpointStats randomTransformCheckpointStats() { return new TransformCheckpointStats(randomLongBetween(1, 1_000_000), - DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), - randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(), + TransformIndexerPositionTests.randomTransformIndexerPosition(), + randomBoolean() ? null : TransformProgressTests.randomTransformProgress(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); } @Override protected TransformCheckpointStats createServerTestInstance(XContentType xContentType) { - return randomDataFrameTransformCheckpointStats(); + return randomTransformCheckpointStats(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats doParseToClientInstance(XContentParser parser) + protected org.elasticsearch.client.transform.transforms.TransformCheckpointStats doParseToClientInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats.fromXContent(parser); + return org.elasticsearch.client.transform.transforms.TransformCheckpointStats.fromXContent(parser); } @Override protected void assertInstances(TransformCheckpointStats serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats clientInstance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats clientInstance) { assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint())); assertThat(serverTestInstance.getPosition().getBucketsPosition(), equalTo(clientInstance.getPosition().getBucketsPosition())); assertThat(serverTestInstance.getPosition().getIndexerPosition(), equalTo(clientInstance.getPosition().getIndexerPosition())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java similarity index 65% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java index 45db79b8256..663d32ff3c4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java @@ -27,41 +27,41 @@ import java.io.IOException; import java.time.Instant; import java.util.function.Predicate; -public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase< +public class TransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase< TransformCheckpointingInfo, - 
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo> { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo> { public static TransformCheckpointingInfo fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) { return new TransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getLast()), - DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getNext()), + TransformCheckpointStatsTests.fromHlrc(instance.getLast()), + TransformCheckpointStatsTests.fromHlrc(instance.getNext()), instance.getOperationsBehind(), instance.getChangesLastDetectedAt()); } @Override - public org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo.fromXContent(parser); + public org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo.fromXContent(parser); } @Override public TransformCheckpointingInfo convertHlrcToInternal( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) { return fromHlrc(instance); } - public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() { + public static TransformCheckpointingInfo randomTransformCheckpointingInfo() { return new TransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), randomNonNegativeLong(), randomBoolean() ? 
null : Instant.ofEpochMilli(randomNonNegativeLong())); } @Override protected TransformCheckpointingInfo createTestInstance() { - return randomDataFrameTransformCheckpointingInfo(); + return randomTransformCheckpointingInfo(); } @Override diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java similarity index 81% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java index a26b94482ae..6ec176294c7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java @@ -29,35 +29,35 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; -public class DataFrameIndexerPositionTests extends AbstractResponseTestCase< +public class TransformIndexerPositionTests extends AbstractResponseTestCase< TransformIndexerPosition, - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition> { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition> { public static TransformIndexerPosition fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition instance) { if (instance == null) { return null; } return new TransformIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition()); } - public static TransformIndexerPosition randomDataFrameIndexerPosition() { + public static TransformIndexerPosition randomTransformIndexerPosition() { return new TransformIndexerPosition(randomPositionMap(), randomPositionMap()); } @Override protected TransformIndexerPosition createServerTestInstance(XContentType xContentType) { - return randomDataFrameIndexerPosition(); + return randomTransformIndexerPosition(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition doParseToClientInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition.fromXContent(parser); + protected org.elasticsearch.client.transform.transforms.TransformIndexerPosition doParseToClientInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformIndexerPosition.fromXContent(parser); } @Override protected void assertInstances(TransformIndexerPosition serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition clientInstance) { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition clientInstance) { assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition())); assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition())); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java similarity index 82% rename from 
client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java index 0c696c7368b..e4e22bf3611 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java @@ -25,12 +25,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import java.io.IOException; -public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTestCase< +public class TransformIndexerStatsTests extends AbstractHlrcXContentTestCase< TransformIndexerStats, - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats> { + org.elasticsearch.client.transform.transforms.TransformIndexerStats> { public static TransformIndexerStats fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) { return new TransformIndexerStats( instance.getNumPages(), instance.getNumDocuments(), @@ -48,14 +48,14 @@ public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTes } @Override - public org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats doHlrcParseInstance(XContentParser parser) + public org.elasticsearch.client.transform.transforms.TransformIndexerStats doHlrcParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats.fromXContent(parser); + return org.elasticsearch.client.transform.transforms.TransformIndexerStats.fromXContent(parser); } @Override public TransformIndexerStats convertHlrcToInternal( - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) { return fromHlrc(instance); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java similarity index 79% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java index ff6e797b7dd..1d0b5257b7d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java @@ -26,12 +26,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import static org.hamcrest.Matchers.equalTo; -public class DataFrameTransformProgressTests extends AbstractResponseTestCase< +public class TransformProgressTests extends AbstractResponseTestCase< TransformProgress, - org.elasticsearch.client.transform.transforms.DataFrameTransformProgress> { + org.elasticsearch.client.transform.transforms.TransformProgress> { public static TransformProgress fromHlrc( - 
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress instance) { + org.elasticsearch.client.transform.transforms.TransformProgress instance) { if (instance == null) { return null; } @@ -41,7 +41,7 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase< instance.getDocumentsIndexed()); } - public static TransformProgress randomDataFrameTransformProgress() { + public static TransformProgress randomTransformProgress() { Long totalDocs = randomBoolean() ? null : randomNonNegativeLong(); Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null; return new TransformProgress( @@ -53,17 +53,17 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase< @Override protected TransformProgress createServerTestInstance(XContentType xContentType) { - return randomDataFrameTransformProgress(); + return randomTransformProgress(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameTransformProgress doParseToClientInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameTransformProgress.fromXContent(parser); + protected org.elasticsearch.client.transform.transforms.TransformProgress doParseToClientInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformProgress.fromXContent(parser); } @Override protected void assertInstances(TransformProgress serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameTransformProgress clientInstance) { + org.elasticsearch.client.transform.transforms.TransformProgress clientInstance) { assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs())); assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed())); assertThat(serverTestInstance.getPercentComplete(), equalTo(clientInstance.getPercentComplete())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java similarity index 75% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java index f58947be54e..5c15fb7bed7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java @@ -34,8 +34,8 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Predicate; -public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase { +public class TransformStatsTests extends AbstractHlrcXContentTestCase { public static NodeAttributes fromHlrc(org.elasticsearch.client.transform.transforms.NodeAttributes attributes) { return attributes == null ? null : new NodeAttributes(attributes.getId(), @@ -46,45 +46,45 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase field.contains("position") || field.equals("node.attributes"); } - public static TransformProgress randomDataFrameTransformProgress() { + public static TransformProgress randomTransformProgress() { Long totalDocs = randomBoolean() ? 
null : randomNonNegativeLong();
         Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
         return new TransformProgress(
@@ -107,16 +107,16 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<
-<1> The response contains a list of `DataFrameTransformStats` objects
+<1> The response contains a list of `TransformStats` objects
 <2> The running state of the {transform}, for example `started`, `indexing`, etc.
 <3> The overall {transform} statistics recording the number of documents indexed etc.
 <4> The progress of the current run in the {transform}. Supplies the number of docs left until the next checkpoint
diff --git a/docs/java-rest/high-level/transform/preview_transform.asciidoc b/docs/java-rest/high-level/transform/preview_transform.asciidoc
index 5de00f5891f..377aba597a6 100644
--- a/docs/java-rest/high-level/transform/preview_transform.asciidoc
+++ b/docs/java-rest/high-level/transform/preview_transform.asciidoc
@@ -1,7 +1,7 @@
 --
 :api: preview-transform
-:request: PreviewDataFrameTransformRequest
-:response: PreviewDataFrameTransformResponse
+:request: PreviewTransformRequest
+:response: PreviewTransformResponse
 --
 [role="xpack"]
 [id="{upid}-{api}"]
diff --git a/docs/java-rest/high-level/transform/put_transform.asciidoc b/docs/java-rest/high-level/transform/put_transform.asciidoc
index b84ba1329cd..53b0445564a 100644
--- a/docs/java-rest/high-level/transform/put_transform.asciidoc
+++ b/docs/java-rest/high-level/transform/put_transform.asciidoc
@@ -1,6 +1,6 @@
 --
 :api: put-transform
-:request: PutDataFrameTransformRequest
+:request: PutTransformRequest
 :response: AcknowledgedResponse
 --
 [role="xpack"]
@@ -29,7 +29,7 @@ such an error will not be visible until `_start` is called.
 [id="{upid}-{api}-config"]
 ==== {transform-cap} configuration

-The `DataFrameTransformConfig` object contains all the details about the
+The `TransformConfig` object contains all the details about the
 {transform} configuration and contains the following arguments:

 ["source","java",subs="attributes,callouts,macros"]
diff --git a/docs/java-rest/high-level/transform/start_transform.asciidoc b/docs/java-rest/high-level/transform/start_transform.asciidoc
index 69aea67dc2b..9de2a0da23d 100644
--- a/docs/java-rest/high-level/transform/start_transform.asciidoc
+++ b/docs/java-rest/high-level/transform/start_transform.asciidoc
@@ -1,7 +1,7 @@
 --
 :api: start-transform
-:request: StartDataFrameTransformRequest
-:response: StartDataFrameTransformResponse
+:request: StartTransformRequest
+:response: StartTransformResponse
 --
 [role="xpack"]
 [id="{upid}-{api}"]
@@ -37,4 +37,4 @@ include::../execution.asciidoc[]
 ==== Response

 The returned +{response}+ object acknowledges the {transform} has
-started.
\ No newline at end of file
+started.
diff --git a/docs/java-rest/high-level/transform/stop_transform.asciidoc b/docs/java-rest/high-level/transform/stop_transform.asciidoc
index 36a1491615e..660594de39f 100644
--- a/docs/java-rest/high-level/transform/stop_transform.asciidoc
+++ b/docs/java-rest/high-level/transform/stop_transform.asciidoc
@@ -1,7 +1,7 @@
 --
 :api: stop-transform
-:request: StopDataFrameTransformRequest
-:response: StopDataFrameTransformResponse
+:request: StopTransformRequest
+:response: StopTransformResponse
 --
 [role="xpack"]
 [id="{upid}-{api}"]
@@ -31,7 +31,7 @@ The following arguments are optional.
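The three optional arguments documented in the callouts just below map onto plain setters on the renamed request. A sketch, assuming the setters keep their pre-rename signatures (`setWaitForCompletion`, `setTimeout`, `setAllowNoMatch`); the transform id is made up:

["source","java"]
--------------------------------------------------
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.common.unit.TimeValue;

public class StopWithOptions {
    public static StopTransformRequest stopRequest() {
        StopTransformRequest request = new StopTransformRequest("reviews-by-user");
        request.setWaitForCompletion(Boolean.TRUE);          // block until the transform task has stopped
        request.setTimeout(TimeValue.timeValueSeconds(30));  // how long to wait for it to stop
        request.setAllowNoMatch(true);                       // tolerate wildcards that match no transforms
        return request;
    }
}
--------------------------------------------------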
 --------------------------------------------------
 include-tagged::{doc-tests-file}[{api}-request-options]
 --------------------------------------------------
-<1> If true wait for the data frame task to stop before responding
+<1> If true, wait for the transform task to stop before responding
 <2> Controls the amount of time to wait until the {transform} stops.
 <3> Whether to ignore if a wildcard expression matches no {transforms}.

@@ -39,4 +39,4 @@ include::../execution.asciidoc[]

 ==== Response

-The returned +{response}+ object acknowledges the {transform} has stopped.
\ No newline at end of file
+The returned +{response}+ object acknowledges the {transform} has stopped.
diff --git a/docs/java-rest/high-level/transform/update_transform.asciidoc b/docs/java-rest/high-level/transform/update_transform.asciidoc
index 37f60eacf47..ffde48ae186 100644
--- a/docs/java-rest/high-level/transform/update_transform.asciidoc
+++ b/docs/java-rest/high-level/transform/update_transform.asciidoc
@@ -1,7 +1,7 @@
 --
 :api: update-transform
-:request: UpdateDataFrameTransformRequest
-:response: UpdateDataFrameTransformResponse
+:request: UpdateTransformRequest
+:response: UpdateTransformResponse
 --
 [role="xpack"]
 [id="{upid}-{api}"]
@@ -30,7 +30,7 @@ such an error will not be visible until `_start` is called.
 [id="{upid}-{api}-config"]
 ==== {transform-cap} update configuration

-The `DataFrameTransformConfigUpdate` object contains all the details about updated
+The `TransformConfigUpdate` object contains all the details about updated
 {transform} configuration and contains the following arguments:

 ["source","java",subs="attributes,callouts,macros"]
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json
similarity index 94%
rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json
rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json
index d8545728ab9..01915ed7b56 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json
@@ -1,5 +1,5 @@
 {
-  "data_frame.delete_data_frame_transform":{
+  "transform.delete_transform":{
     "documentation":{
       "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html"
     },
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json
similarity index 87%
rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json
rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json
index e25a3301ec0..9baf3446a2a 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json
@@ -1,5 +1,5 @@
 {
-  "data_frame.get_data_frame_transform":{
+  "transform.get_transform":{
     "documentation":{
       "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html"
     },
@@ -40,7 +40,7 @@
       "allow_no_match":{
         "type":"boolean",
         "required":false,
-        "description":"Whether to ignore if a wildcard expression matches no data frame transforms.
(This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json similarity index 86% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json index 57b004482a7..f37dfe29ff4 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json @@ -1,5 +1,5 @@ { - "data_frame.get_data_frame_transform_stats":{ + "transform.get_transform_stats":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html" }, @@ -34,7 +34,7 @@ "allow_no_match":{ "type":"boolean", "required":false, - "description":"Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json similarity index 75% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json index e3f24448b9f..b5ff3cbba96 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json @@ -1,5 +1,5 @@ { - "data_frame.preview_data_frame_transform":{ + "transform.preview_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html" }, @@ -15,7 +15,7 @@ ] }, "body":{ - "description":"The definition for the data_frame transform to preview", + "description":"The definition for the transform to preview", "required":true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json similarity index 82% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json index 5e7354f435d..7925fc1063b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json @@ -1,5 +1,5 @@ { - "data_frame.put_data_frame_transform":{ + "transform.put_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html" }, @@ -24,11 +24,11 @@ "defer_validation": { "type": "boolean", "required": false, - "description": "If validations should be deferred until 
data frame transform starts, defaults to false." + "description": "If validations should be deferred until transform starts, defaults to false." } }, "body":{ - "description":"The data frame transform definition", + "description":"The transform definition", "required":true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json similarity index 93% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json index c0d701be562..408f978e22c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json @@ -1,5 +1,5 @@ { - "data_frame.start_data_frame_transform":{ + "transform.start_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html" }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json similarity index 86% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json index c88a7176309..b09d19703bf 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json @@ -1,5 +1,5 @@ { - "data_frame.stop_data_frame_transform":{ + "transform.stop_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html" }, @@ -34,7 +34,7 @@ "allow_no_match":{ "type":"boolean", "required":false, - "description":"Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json similarity index 82% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json index 70d1342815e..68de23da71b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json @@ -1,5 +1,5 @@ { - "data_frame.update_data_frame_transform": { + "transform.update_transform": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html" }, @@ -23,11 +23,11 @@ "defer_validation": { "type": "boolean", "required": false, - "description": "If validations should be deferred until data frame transform starts, defaults to false." + "description": "If validations should be deferred until transform starts, defaults to false." 
} }, "body": { - "description" : "The update data frame transform definition", + "description" : "The update transform definition", "required": true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml similarity index 94% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml index 30c7ec62687..35289c2bbd0 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -68,7 +68,7 @@ setup: --- "Test preview transform": - do: - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -120,7 +120,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -154,7 +154,7 @@ setup: "Test preview transform with invalid config": - do: catch: /\[data_frame_terms_group\] unknown field \[not_a_terms_param\]/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -167,7 +167,7 @@ setup: "Test preview with non-existing source index": - do: catch: /Source index \[does_not_exist\] does not exist/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": ["airline-data", "does_not_exist"] }, @@ -182,7 +182,7 @@ setup: "Test preview returns bad request with invalid agg": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -198,7 +198,7 @@ setup: - do: catch: /field \[time\] cannot be both an object and a field/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -213,7 +213,7 @@ setup: } - do: catch: /field \[super_metric\] cannot be both an object and a field/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -238,7 +238,7 @@ setup: "Test preview with missing pipeline": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -256,7 +256,7 @@ setup: "Test preview with unsupported agg": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -271,7 +271,7 @@ setup: } - do: catch: /Unsupported aggregation type \[terms\]/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml similarity index 90% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml index 57260996bae..fd9f75735e2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml @@ -17,14 +17,14 @@ setup: --- "Test get all transforms when there are none": - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" - match: { count: 0 } - match: { transforms: [] } - do: catch: missing - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" allow_no_match: false @@ -32,21 +32,21 @@ setup: "Test get transform when it does not exist": - do: catch: /Transform with id \[missing-transform-id\] could not be found/ - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "missing-transform-id" --- "Test delete transform when it does not exist": - do: catch: missing - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "missing transform" --- "Test put transform with frequency too low": - do: catch: /minimum permitted \[frequency\] is \[1s\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "frequency-too-low" body: > { @@ -63,7 +63,7 @@ setup: "Test put transform with frequency too high": - do: catch: /highest permitted \[frequency\] is \[1h\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "frequency-too-low" body: > { @@ -80,7 +80,7 @@ setup: "Test put transform with invalid source index": - do: catch: /Source index \[missing-index\] does not exist/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "missing-source-transform" body: > { @@ -92,7 +92,7 @@ setup: } } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "missing-source-transform" defer_validation: true body: > @@ -108,7 +108,7 @@ setup: --- "Test basic transform crud": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -123,7 +123,7 @@ setup: - match: { acknowledged: true } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-dos" body: > { @@ -137,7 +137,7 @@ setup: - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform" - match: { count: 1 } - match: { transforms.0.id: "airline-transform" } @@ -151,7 +151,7 @@ setup: - match: { transforms.0.description: "yaml test transform on airline-data" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } @@ -159,27 +159,27 @@ setup: - is_false: transforms.1.description - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "_all" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform,airline-transform-dos" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" from: 0 size: 1 @@ -187,7 +187,7 @@ setup: - match: { transforms.0.id: 
"airline-transform" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" from: 1 size: 1 @@ -212,7 +212,7 @@ setup: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -228,7 +228,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform" - match: { count: 1 } - match: { transforms.0.id: "airline-transform" } @@ -241,7 +241,7 @@ setup: --- "Test PUT continuous transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-continuous" body: > { @@ -262,7 +262,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-continuous" } @@ -275,7 +275,7 @@ setup: --- "Test PUT continuous transform without delay set": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-continuous" body: > { @@ -295,7 +295,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-continuous" } @@ -309,7 +309,7 @@ setup: "Test transform with invalid page parameter": - do: catch: /Param \[size\] has a max acceptable value of \[1000\]/ - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "_all" from: 0 size: 10000 @@ -317,7 +317,7 @@ setup: "Test transform where dest is included in source": - do: catch: /Destination index \[airline-data-by-airline\] is included in source expression \[airline-data/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -332,7 +332,7 @@ setup: } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" defer_validation: true body: > @@ -363,7 +363,7 @@ setup: index: created-destination-index name: dest-index - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "transform-from-aliases" body: > { @@ -385,7 +385,7 @@ setup: - do: catch: /Destination index \[created-destination-index\] is included in source expression \[airline-data,created-destination-index\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "transform-from-aliases-failures" body: > { @@ -411,7 +411,7 @@ setup: - do: catch: /Destination index \[dest-index\] should refer to a single index/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -428,7 +428,7 @@ setup: "Test put config with invalid pivot size": - do: catch: /pivot\.max_page_search_size \[5\] must be greater than 10 and less than 10,000/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -442,7 +442,7 @@ setup: } - do: catch: /pivot\.max_page_search_size \[15000\] must be greater than 10 and less than 10,000/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -458,7 +458,7 @@ setup: "Test creation failures due to duplicate and conflicting field names": - do: catch: /duplicate field \[airline\] detected/ - 
data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "duplicate-field-transform" body: > { @@ -473,7 +473,7 @@ setup: } - do: catch: /field \[airline\] cannot be both an object and a field/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "duplicate-field-transform" body: > { @@ -487,10 +487,10 @@ setup: } } --- -"Test invalid data frame id": +"Test invalid transform id": - do: catch: /can contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "!@#$%^&*(duplicate-field-transform" body: > { @@ -505,7 +505,7 @@ setup: } - do: catch: /The id cannot contain more than 64 character/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" body: > { @@ -522,7 +522,7 @@ setup: "Test invalid destination index name": - do: catch: /dest\.index \[DeStInAtIoN\] must be lowercase/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -537,7 +537,7 @@ setup: } - do: catch: /Invalid index name \[destination#dest\], must not contain \'#\'/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -556,7 +556,7 @@ setup: - do: catch: /Found \[create_time\], not allowed for strict parsing/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-with-create-time" body: > { @@ -575,7 +575,7 @@ setup: - do: catch: /Found \[version\], not allowed for strict parsing/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-with-version" body: > { @@ -591,7 +591,7 @@ setup: --- "Test force deleting a running transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-delete" body: > { @@ -610,12 +610,12 @@ setup: } - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-delete" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-delete" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-delete" } @@ -623,11 +623,11 @@ setup: - do: catch: /Cannot delete transform \[airline-transform-start-delete\] as the task is running/ - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-delete" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-delete" force: true - match: { acknowledged: true } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml similarity index 81% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml index 54805ffd7e7..ad28d880faf 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml @@ -17,7 +17,7 @@ setup: event_rate: 
type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-stop" body: > { @@ -32,18 +32,18 @@ setup: --- teardown: - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" timeout: "10m" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-stop" --- "Test start transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } @@ -51,25 +51,25 @@ teardown: "Test start missing transform": - do: catch: missing - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "missing-transform" --- "Test start already started transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: catch: /Cannot start transform \[airline-transform-start-stop\] as it is already started/ - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" --- "Verify start transform creates destination index with appropriate mapping": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: @@ -84,7 +84,7 @@ teardown: indices.create: index: airline-data-by-airline-start-stop - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: @@ -94,37 +94,37 @@ teardown: --- "Test start/stop/start transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } @@ -132,7 +132,7 @@ teardown: --- "Test start/stop/start continuous transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-stop-continuous" body: > { @@ -151,82 +151,82 @@ teardown: } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop-continuous" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: 
"airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop-continuous" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop-continuous" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop-continuous" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-stop-continuous" --- "Test stop missing transform": - do: catch: missing - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "missing-transform" --- "Test stop missing transform by expression": - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: allow_no_match: true transform_id: "missing-transform*" - do: catch: missing - data_frame.stop_data_frame_transform: + transform.stop_transform: allow_no_match: false transform_id: "missing-transform*" --- "Test stop already stopped transform": - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } --- "Test start/stop only starts/stops specified transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-later" body: > { @@ -238,57 +238,57 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-later" - match: { acknowledged: true } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - 
match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-later" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-later" --- "Test stop all": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stop-all" body: > { @@ -300,35 +300,35 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stop-all" - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "_all" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 2 } - match: { transforms.0.state: "stopped" } - match: { transforms.1.state: "stopped" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stop-all" --- "Test start/stop with field alias": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline_via_field_alias" body: > { @@ -340,7 +340,7 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline_via_field_alias" - match: { acknowledged: true } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml similarity index 87% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml index b4699898d48..5f4c11f00d0 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml @@ -14,7 +14,7 @@ setup: event_rate: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats" body: > { @@ -26,24 +26,24 @@ setup: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stats" --- teardown: - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-stats" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats" --- "Test get transform stats": - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats" } @@ -63,7 +63,7 @@ teardown: --- "Test get transform stats on missing transform": - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "missing-transform" - match: { count: 0 } - match: { transforms: [] } @@ -71,7 +71,7 @@ teardown: --- "Test get multiple transform stats": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ 
-83,7 +83,7 @@ teardown: } } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-the-third" body: > { @@ -95,7 +95,7 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } @@ -103,7 +103,7 @@ teardown: - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } @@ -111,14 +111,14 @@ teardown: - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-dos,airline-transform-stats-the*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats-dos" } - match: { transforms.1.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" from: 0 size: 1 @@ -126,7 +126,7 @@ teardown: - match: { transforms.0.id: "airline-transform-stats" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" from: 1 size: 2 @@ -135,18 +135,18 @@ teardown: - match: { transforms.1.id: "airline-transform-stats-the-third" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats-dos" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats-the-third" --- "Test get multiple transform stats where one does not have a task": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ -158,14 +158,14 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats" } - match: { transforms.1.id: "airline-transform-stats-dos" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats" } @@ -175,7 +175,7 @@ teardown: "Test get single transform stats when it does not have a task": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ -187,7 +187,7 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-dos" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats-dos" } @@ -206,7 +206,7 @@ teardown: --- "Test get continuous transform stats": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-continuous" body: > { @@ -219,10 +219,10 @@ teardown: "sync": { "time": { "field": "time", "delay": "1m" } } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stats-continuous" - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats-continuous" } @@ -251,10 +251,10 @@ teardown: - match: { 
transforms.0.stats.exponential_avg_documents_processed: 0.0 } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-stats-continuous" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats-continuous" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml similarity index 91% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml index f764258e5af..5b054a27fa3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml @@ -15,7 +15,7 @@ setup: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "updating-airline-transform" body: > { @@ -38,7 +38,7 @@ setup: "Test update transform with missing transform": - do: catch: /Transform with id \[missing-transform\] could not be found/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "missing-transform" body: > { @@ -48,7 +48,7 @@ setup: "Test update transform with frequency too low": - do: catch: /minimum permitted \[frequency\] is \[1s\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -58,7 +58,7 @@ setup: "Test update transform with frequency too high": - do: catch: /highest permitted \[frequency\] is \[1h\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -68,14 +68,14 @@ setup: "Test put transform with invalid source index": - do: catch: /Source index \[missing-index\] does not exist/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { "source": { "index": "missing-index" } } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" defer_validation: true body: > @@ -87,7 +87,7 @@ setup: --- "Test update transform when it is batch": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "batch-airline-transform" body: > { @@ -99,7 +99,7 @@ setup: } } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "batch-airline-transform" body: > { @@ -109,7 +109,7 @@ setup: - do: catch: /Cannot change the current sync configuration of transform \[batch-airline-transform\] from \[null\] to \[time\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "batch-airline-transform" body: > { @@ -137,7 +137,7 @@ setup: event_rate: type: integer - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "updating-airline-transform" - match: { count: 1 } - match: { transforms.0.id: "updating-airline-transform" } @@ -154,7 +154,7 @@ setup: - match: { transforms.0.frequency: "60s" } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -180,7 +180,7 @@ setup: - match: { frequency: "5s" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: 
transform_id: "updating-airline-transform" - match: { count: 1 } - match: { transforms.0.id: "updating-airline-transform" } @@ -200,7 +200,7 @@ setup: "Test transform where dest is included in source": - do: catch: /Destination index \[airline-data-by-airline\] is included in source expression \[airline-data/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -211,7 +211,7 @@ setup: } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" defer_validation: true body: > @@ -238,7 +238,7 @@ setup: index: created-destination-index name: dest-index - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -256,7 +256,7 @@ setup: - do: catch: /Destination index \[created-destination-index\] is included in source expression \[created-destination-index\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -281,7 +281,7 @@ setup: name: dest2-index - do: catch: /Destination index \[dest2-index\] should refer to a single index/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -291,7 +291,7 @@ setup: "Test invalid destination index name": - do: catch: /dest\.index \[DeStInAtIoN\] must be lowercase/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -299,7 +299,7 @@ setup: } - do: catch: /Invalid index name \[destination#dest\], must not contain \'#\'/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index d394636507c..46c53b8204b 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -16,9 +16,9 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; import org.elasticsearch.client.transform.transforms.pivot.SingleGroupSource; @@ -65,7 +65,7 @@ public class TransformIT extends TransformIntegTestCase { .addAggregator(AggregationBuilders.avg("review_score").field("stars")) .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); - DataFrameTransformConfig config = 
createTransformConfig("transform-crud", + TransformConfig config = createTransformConfig("transform-crud", groups, aggs, "reviews-by-user-business-day", @@ -78,7 +78,7 @@ public class TransformIT extends TransformIntegTestCase { stopTransform(config.getId()); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); @@ -98,7 +98,7 @@ public class TransformIT extends TransformIntegTestCase { .addAggregator(AggregationBuilders.avg("review_score").field("stars")) .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); - DataFrameTransformConfig config = createTransformConfigBuilder("transform-crud", + TransformConfig config = createTransformConfigBuilder("transform-crud", groups, aggs, "reviews-by-user-business-day", @@ -112,7 +112,7 @@ public class TransformIT extends TransformIntegTestCase { waitUntilCheckpoint(config.getId(), 1L); assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(), - equalTo(DataFrameTransformStats.State.STARTED)); + equalTo(TransformStats.State.STARTED)); long docsIndexed = getTransformStats(config.getId()) .getTransformsStats() @@ -120,7 +120,7 @@ public class TransformIT extends TransformIntegTestCase { .getIndexerStats() .getNumDocuments(); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); @@ -155,7 +155,7 @@ public class TransformIT extends TransformIntegTestCase { String id = "transform-to-update"; String dest = "reviews-by-user-business-day-to-update"; - DataFrameTransformConfig config = createTransformConfigBuilder(id, + TransformConfig config = createTransformConfigBuilder(id, groups, aggs, dest, @@ -169,7 +169,7 @@ public class TransformIT extends TransformIntegTestCase { waitUntilCheckpoint(config.getId(), 1L); assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(), - oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); long docsIndexed = getTransformStats(config.getId()) .getTransformsStats() @@ -177,13 +177,13 @@ public class TransformIT extends TransformIntegTestCase { .getIndexerStats() .getNumDocuments(); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); String pipelineId = "add_forty_two"; - DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate.builder() + TransformConfigUpdate update = TransformConfigUpdate.builder() .setDescription("updated config") .setDest(DestConfig.builder().setIndex(dest).setPipeline(pipelineId).build()) 
.build(); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java index d1cde296664..0d5bc7d94b9 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java @@ -16,22 +16,22 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.AcknowledgedResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.QueryConfig; import org.elasticsearch.client.transform.transforms.SourceConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.client.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.client.transform.transforms.pivot.GroupConfig; @@ -70,7 +70,7 @@ import static org.hamcrest.core.Is.is; abstract class TransformIntegTestCase extends ESRestTestCase { - private Map transformConfigs = new HashMap<>(); + private Map transformConfigs = new HashMap<>(); protected void cleanUp() throws IOException { cleanUpTransforms(); @@ -78,54 +78,54 @@ abstract class TransformIntegTestCase extends ESRestTestCase { } protected void 
cleanUpTransforms() throws IOException { - for (DataFrameTransformConfig config : transformConfigs.values()) { + for (TransformConfig config : transformConfigs.values()) { stopTransform(config.getId()); deleteTransform(config.getId()); } transformConfigs.clear(); } - protected StopDataFrameTransformResponse stopTransform(String id) throws IOException { + protected StopTransformResponse stopTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().stopDataFrameTransform(new StopDataFrameTransformRequest(id, true, null), RequestOptions.DEFAULT); + return restClient.transform().stopTransform(new StopTransformRequest(id, true, null), RequestOptions.DEFAULT); } - protected StartDataFrameTransformResponse startTransform(String id, RequestOptions options) throws IOException { + protected StartTransformResponse startTransform(String id, RequestOptions options) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().startDataFrameTransform(new StartDataFrameTransformRequest(id), options); + return restClient.transform().startTransform(new StartTransformRequest(id), options); } protected AcknowledgedResponse deleteTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); AcknowledgedResponse response = - restClient.dataFrame().deleteDataFrameTransform(new DeleteDataFrameTransformRequest(id), RequestOptions.DEFAULT); + restClient.transform().deleteTransform(new DeleteTransformRequest(id), RequestOptions.DEFAULT); if (response.isAcknowledged()) { transformConfigs.remove(id); } return response; } - protected AcknowledgedResponse putTransform(DataFrameTransformConfig config, RequestOptions options) throws IOException { + protected AcknowledgedResponse putTransform(TransformConfig config, RequestOptions options) throws IOException { if (transformConfigs.keySet().contains(config.getId())) { throw new IllegalArgumentException("transform [" + config.getId() + "] is already registered"); } RestHighLevelClient restClient = new TestRestHighLevelClient(); AcknowledgedResponse response = - restClient.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(config), options); + restClient.transform().putTransform(new PutTransformRequest(config), options); if (response.isAcknowledged()) { transformConfigs.put(config.getId(), config); } return response; } - protected GetDataFrameTransformStatsResponse getTransformStats(String id) throws IOException { + protected GetTransformStatsResponse getTransformStats(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().getDataFrameTransformStats(new GetDataFrameTransformStatsRequest(id), RequestOptions.DEFAULT); + return restClient.transform().getTransformStats(new GetTransformStatsRequest(id), RequestOptions.DEFAULT); } - protected GetDataFrameTransformResponse getTransform(String id) throws IOException { + protected GetTransformResponse getTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().getDataFrameTransform(new GetDataFrameTransformRequest(id), RequestOptions.DEFAULT); + return restClient.transform().getTransform(new GetTransformRequest(id), RequestOptions.DEFAULT); } protected void waitUntilCheckpoint(String id, long checkpoint) throws Exception { @@ -195,21 +195,21 @@ abstract class TransformIntegTestCase extends 
ESRestTestCase { return builder.build(); } - protected DataFrameTransformConfig createTransformConfig(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - String... sourceIndices) throws Exception { + protected TransformConfig createTransformConfig(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + String... sourceIndices) throws Exception { return createTransformConfig(id, groups, aggregations, destinationIndex, QueryBuilders.matchAllQuery(), sourceIndices); } - protected DataFrameTransformConfig.Builder createTransformConfigBuilder(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - QueryBuilder queryBuilder, - String... sourceIndices) throws Exception { - return DataFrameTransformConfig.builder() + protected TransformConfig.Builder createTransformConfigBuilder(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + QueryBuilder queryBuilder, + String... sourceIndices) throws Exception { + return TransformConfig.builder() .setId(id) .setSource(SourceConfig.builder().setIndex(sourceIndices).setQueryConfig(createQueryConfig(queryBuilder)).build()) .setDest(DestConfig.builder().setIndex(destinationIndex).build()) @@ -218,12 +218,12 @@ abstract class TransformIntegTestCase extends ESRestTestCase { .setDescription("Test transform config id: " + id); } - protected DataFrameTransformConfig createTransformConfig(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - QueryBuilder queryBuilder, - String... sourceIndices) throws Exception { + protected TransformConfig createTransformConfig(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + QueryBuilder queryBuilder, + String... 
sourceIndices) throws Exception { return createTransformConfigBuilder(id, groups, aggregations, destinationIndex, queryBuilder, sourceIndices).build(); } @@ -233,9 +233,9 @@ abstract class TransformIntegTestCase extends ESRestTestCase { assertThat(response.buildFailureMessage(), response.hasFailures(), is(false)); } - protected void updateConfig(String id, DataFrameTransformConfigUpdate update) throws Exception { + protected void updateConfig(String id, TransformConfigUpdate update) throws Exception { RestHighLevelClient restClient = new TestRestHighLevelClient(); - restClient.dataFrame().updateDataFrameTransform(new UpdateDataFrameTransformRequest(update, id), RequestOptions.DEFAULT); + restClient.transform().updateTransform(new UpdateTransformRequest(update, id), RequestOptions.DEFAULT); } protected void createReviewsIndex(String indexName, int numDocs) throws Exception { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java index af35bb022de..03b396ef845 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java @@ -19,11 +19,11 @@ import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.client.indices.CreateIndexRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; @@ -88,12 +88,12 @@ public class TransformInternalIndexIT extends ESRestTestCase { RequestOptions.DEFAULT); assertThat(getResponse.isExists(), is(true)); - GetDataFrameTransformResponse response = client.dataFrame() - .getDataFrameTransform(new GetDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + GetTransformResponse response = client.transform() + .getTransform(new GetTransformRequest(transformId), RequestOptions.DEFAULT); assertThat(response.getTransformConfigurations().get(0).getId(), equalTo(transformId)); - UpdateDataFrameTransformResponse updated = client.dataFrame().updateDataFrameTransform( - new UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate.builder().setDescription("updated").build(), transformId), + UpdateTransformResponse updated = client.transform().updateTransform( + new 
UpdateTransformRequest(TransformConfigUpdate.builder().setDescription("updated").build(), transformId), RequestOptions.DEFAULT); assertThat(updated.getTransformConfiguration().getId(), equalTo(transformId)); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java index f77e2d78dee..9aaef80f011 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java @@ -11,9 +11,9 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.core.IndexerState; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.SourceConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; @@ -129,7 +129,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { totalDocsWrittenSum += docs * ENTITIES.size(); } long totalDocsWritten = totalDocsWrittenSum; - DataFrameTransformConfig config = DataFrameTransformConfig.builder() + TransformConfig config = TransformConfig.builder() .setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1))) .setPivotConfig(PivotConfig.builder() .setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("stars").field("stars"))) @@ -146,12 +146,12 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { waitUntilAfterCheckpoint(CONTINUOUS_DATA_FRAME_ID, 0L); assertBusy(() -> { - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); assertThat(stateAndStats.getIndexerStats().getOutputDocuments(), equalTo((long)ENTITIES.size())); assertThat(stateAndStats.getIndexerStats().getNumDocuments(), equalTo(totalDocsWritten)); // Even if we get back to started, we may periodically get set back to `indexing` when triggered. 
// Though short lived due to no changes on the source indices, it could result in flaky test behavior - assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); }, 120, TimeUnit.SECONDS); @@ -165,13 +165,13 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { // A continuous data frame should automatically become started when it gets assigned to a node // if it was assigned to the node that was removed from the cluster assertBusy(() -> { - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); - assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); }, 120, TimeUnit.SECONDS); - DataFrameTransformStats previousStateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats previousStateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); // Add a new user and write data to it // This is so we can have more reliable data counts, as writing to existing entities requires @@ -190,10 +190,10 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { greaterThanOrEqualTo(docs + previousStateAndStats.getIndexerStats().getNumDocuments())), 120, TimeUnit.SECONDS); - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); assertThat(stateAndStats.getState(), - oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); awaitWrittenIndexerState(CONTINUOUS_DATA_FRAME_ID, (responseBody) -> { Map indexerStats = (Map)((List)XContentMapValues.extractValue("hits.hits._source.stats", responseBody)) @@ -245,7 +245,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { }); } - private void putTransform(String id, DataFrameTransformConfig config) throws IOException { + private void putTransform(String id, TransformConfig config) throws IOException { final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + id); createDataframeTransformRequest.setJsonEntity(Strings.toString(config)); Response response = client().performRequest(createDataframeTransformRequest); @@ -270,7 +270,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { assertEquals(200, response.getStatusLine().getStatusCode()); } - private DataFrameTransformStats getTransformStats(String id) throws IOException { + private TransformStats getTransformStats(String id) throws IOException { final Request getStats = new Request("GET", DATAFRAME_ENDPOINT + id + "/_stats"); Response response = client().performRequest(getStats); assertEquals(200, response.getStatusLine().getStatusCode()); @@ -278,7 +278,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { try (XContentParser parser = xContentType.xContent().createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, response.getEntity().getContent())) { - GetDataFrameTransformStatsResponse resp = GetDataFrameTransformStatsResponse.fromXContent(parser); + 
GetTransformStatsResponse resp = GetTransformStatsResponse.fromXContent(parser); assertThat(resp.getTransformsStats(), hasSize(1)); return resp.getTransformsStats().get(0); } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml index f678f36b578..1ed5d63fd0e 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml @@ -2,16 +2,16 @@ "Test put batch data frame transforms on mixed cluster": - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-simple-transform" body: > { - "source": { "index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "mixed-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -21,11 +21,11 @@ - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } @@ -34,13 +34,13 @@ #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } @@ -49,12 +49,12 @@ #- match: { transforms.0.state: "stopped" } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-complex-transform" body: > { "source": { - "index": "dataframe-transform-airline-data", + "index": "transform-airline-data", "query": { "bool": { "filter": {"term": {"airline": "ElasticAir"}} @@ -76,17 +76,17 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } @@ -95,13 +95,13 @@ #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } @@ -113,16 +113,16 @@ "Test put continuous data frame 
transform on mixed cluster": - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-simple-continuous-transform" body: > { - "source": { "index": "dataframe-transform-airline-data-cont" }, + "source": { "index": "transform-airline-data-cont" }, "dest": { "index": "mixed-simple-continuous-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -138,7 +138,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -148,24 +148,24 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -175,26 +175,26 @@ "Test GET, start, and stop old cluster batch transforms": - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } @@ -203,12 +203,12 @@ #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } @@ -217,11 +217,11 @@ #- match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { 
transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-complex-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.group_by.day.date_histogram.field: "timestamp" } @@ -229,11 +229,11 @@ - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -242,12 +242,12 @@ #- match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -259,12 +259,12 @@ "Test GET, stop, start, old continuous transforms": - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -274,24 +274,24 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml index 1a25dc1a069..894f2053d67 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml @@ -2,7 +2,7 @@ "Test put batch data frame transforms on old cluster": - do: indices.create: - index: dataframe-transform-airline-data + index: transform-airline-data body: mappings: properties: @@ -16,16 +16,16 @@ type: integer - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-simple-transform" body: > { - "source": { 
"index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "old-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -35,40 +35,40 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-complex-transform" body: > { "source": { - "index": "dataframe-transform-airline-data", + "index": "transform-airline-data", "query": { "bool": { "filter": {"term": {"airline": "ElasticAir"}} @@ -90,29 +90,29 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -121,7 +121,7 @@ "Test put continuous data frame transform on old cluster": - do: indices.create: - index: dataframe-transform-airline-data-cont + index: transform-airline-data-cont body: mappings: properties: @@ -135,16 +135,16 @@ type: integer - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-simple-continuous-transform" body: > { - "source": { "index": "dataframe-transform-airline-data-cont" }, + "source": { "index": "transform-airline-data-cont" }, "dest": { "index": "old-simple-continuous-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -160,7 +160,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -170,23 +170,23 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + 
transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml index 3c07098bc97..715589266c2 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml @@ -9,43 +9,43 @@ setup: "Get start, stop, and delete old and mixed cluster batch data frame transforms": # Simple and complex OLD transforms - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-complex-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.group_by.day.date_histogram.field: "timestamp" } @@ -53,23 +53,23 @@ setup: - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - 
data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -77,44 +77,44 @@ setup: # Simple and complex Mixed cluster transforms - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "mixed-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "mixed-complex-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.group_by.day.date_histogram.field: "timestamp" } @@ -122,23 +122,23 @@ setup: - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: 
"mixed-complex-transform" } @@ -146,22 +146,22 @@ setup: # Delete all old and mixed transforms - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "old-simple-transform" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "mixed-simple-transform" - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform,mixed-simple-transform" - match: { count: 0 } --- "Test GET, stop, delete, old and mixed continuous transforms": - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -171,42 +171,42 @@ setup: - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "old-simple-continuous-transform" - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -216,48 +216,48 @@ setup: - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "stopped" } - do: - 
data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "mixed-simple-continuous-transform" --- "Test index mappings for latest internal index": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "upgraded-simple-transform" defer_validation: true body: > { - "source": { "index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "upgraded-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, From 7377ac4637f6c509f17b7e911145830dda1b7046 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 25 Sep 2019 07:38:59 +0200 Subject: [PATCH 35/94] [Transform] Replace transforms with transform, index constants (#47023) - replace "transforms" with "transform" for consistency - use constants for internal index naming wherever possible and document required changes --- .../xpack/core/XPackClientPlugin.java | 8 ++-- ...rmsAction.java => GetTransformAction.java} | 10 ++--- ...tion.java => GetTransformStatsAction.java} | 8 ++-- .../TransformInternalIndexConstants.java | 39 +++++++++++++++++++ .../authz/store/ReservedRolesStoreTests.java | 21 +++++----- ...va => GetTransformActionRequestTests.java} | 4 +- ...a => GetTransformActionResponseTests.java} | 4 +- ... GetTransformStatsActionRequestTests.java} | 4 +- ...GetTransformStatsActionResponseTests.java} | 4 +- .../integration/TransformAuditorIT.java | 8 ++-- .../TransformConfigurationIndexIT.java | 6 +-- .../integration/TransformInternalIndexIT.java | 6 +-- .../integration/TransformRestTestCase.java | 4 +- .../integration/TransformUsageIT.java | 6 +-- .../xpack/transform/Transform.java | 20 +++++----- .../xpack/transform/TransformFeatureSet.java | 6 +-- ....java => TransportGetTransformAction.java} | 22 +++++------ ... 
=> TransportGetTransformStatsAction.java} | 28 ++++++------- .../action/TransportStopTransformAction.java | 4 +- .../notifications/TransformAuditor.java | 4 +- .../persistence/TransformConfigManager.java | 33 ++++++++-------- .../persistence/TransformInternalIndex.java | 26 ++++--------- .../rest/action/RestGetTransformAction.java | 6 +-- .../action/RestGetTransformStatsAction.java | 6 +-- .../TransformPersistentTasksExecutor.java | 3 +- .../TransformSingleNodeTestCase.java | 4 +- .../TransformConfigManagerTests.java | 33 +++++++++------- .../TransformInternalIndexTests.java | 3 +- ...TransformPersistentTasksExecutorTests.java | 8 ++-- 29 files changed, 189 insertions(+), 149 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsAction.java => GetTransformAction.java} (93%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsStatsAction.java => GetTransformStatsAction.java} (96%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsActionRequestTests.java => GetTransformActionRequestTests.java} (88%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsActionResponseTests.java => GetTransformActionResponseTests.java} (96%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsStatsActionRequestTests.java => GetTransformStatsActionRequestTests.java} (87%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/{GetTransformsStatsActionResponseTests.java => GetTransformStatsActionResponseTests.java} (92%) rename x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/{TransportGetTransformsAction.java => TransportGetTransformAction.java} (81%) rename x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/{TransportGetTransformsStatsAction.java => TransportGetTransformStatsAction.java} (94%) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 12f77fdc18e..9ba8ea02306 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -204,8 +204,8 @@ import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction; import org.elasticsearch.xpack.core.transform.TransformFeatureSetUsage; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; @@ -425,8 +425,8 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl 
StartTransformAction.INSTANCE, StopTransformAction.INSTANCE, DeleteTransformAction.INSTANCE, - GetTransformsAction.INSTANCE, - GetTransformsStatsAction.INSTANCE, + GetTransformAction.INSTANCE, + GetTransformStatsAction.INSTANCE, PreviewTransformAction.INSTANCE ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java similarity index 93% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java index 03307665829..fcc17a5cedd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java @@ -28,16 +28,16 @@ import java.util.List; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetTransformsAction extends ActionType<GetTransformsAction.Response> { +public class GetTransformAction extends ActionType<GetTransformAction.Response> { - public static final GetTransformsAction INSTANCE = new GetTransformsAction(); + public static final GetTransformAction INSTANCE = new GetTransformAction(); public static final String NAME = "cluster:monitor/data_frame/get"; private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(GetTransformsAction.class)); + LogManager.getLogger(GetTransformAction.class)); - private GetTransformsAction() { - super(NAME, GetTransformsAction.Response::new); + private GetTransformAction() { + super(NAME, GetTransformAction.Response::new); } public static class Request extends AbstractGetResourcesRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java similarity index 96% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index a83f6cb53ba..d1832368bb3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -34,12 +34,12 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetTransformsStatsAction extends ActionType<GetTransformsStatsAction.Response> { +public class GetTransformStatsAction extends ActionType<GetTransformStatsAction.Response> { - public static final GetTransformsStatsAction INSTANCE = new GetTransformsStatsAction(); + public static final GetTransformStatsAction INSTANCE = new GetTransformStatsAction(); public static final String NAME = "cluster:monitor/data_frame/stats/get"; - public GetTransformsStatsAction() { - super(NAME, GetTransformsStatsAction.Response::new); + public GetTransformStatsAction() { + super(NAME, GetTransformStatsAction.Response::new); } public static class Request extends BaseTasksRequest<Request> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java new file mode 100644 index 00000000000..dcf8707e87c --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.transform.transforms.persistence; + +public final class TransformInternalIndexConstants { + + /* Constants for internal indexes of the transform plugin + * (defined in core to provide wider access) + * + * Increase the version number for every mapping change, see TransformInternalIndex for details + * + * Together with increasing the version number please keep the following in sync: + * + * - XPackRestTestConstants + * - yaml tests under x-pack/qa/ + * + * (pro-tip: grep for the constant) + */ + + // internal index + public static final String INDEX_VERSION = "2"; + public static final String INDEX_PATTERN = ".data-frame-internal-"; + public static final String LATEST_INDEX_VERSIONED_NAME = INDEX_PATTERN + INDEX_VERSION; + public static final String LATEST_INDEX_NAME = LATEST_INDEX_VERSIONED_NAME; + public static final String INDEX_NAME_PATTERN = INDEX_PATTERN + "*"; + + // audit index + public static final String AUDIT_TEMPLATE_VERSION = "1"; + public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-"; + public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; + + private TransformInternalIndexConstants() { + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index f1e7f1a9bfe..9c23def4283 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -133,12 +133,13 @@ import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import 
org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; @@ -1130,8 +1131,8 @@ public class ReservedRolesStoreTests extends ESTestCase { Role role = Role.builder(roleDescriptor, null).build(); assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(true)); @@ -1140,9 +1141,9 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertOnlyReadAllowed(role, ".data-frame-notifications-1"); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, ".data-frame-internal-1"); // internal use only + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); // internal use only assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); @@ -1169,8 +1170,8 @@ public class ReservedRolesStoreTests extends ESTestCase { Role role = Role.builder(roleDescriptor, null).build(); assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false)); @@ -1179,9 +1180,9 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertOnlyReadAllowed(role, ".data-frame-notifications-1"); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, ".data-frame-internal-1"); + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java similarity index 88% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java rename to 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java index 53cb7d01cf2..4014ba1310b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Request; -public class GetTransformsActionRequestTests extends AbstractWireSerializingTestCase<Request> { +public class GetTransformActionRequestTests extends AbstractWireSerializingTestCase<Request> { @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java similarity index 96% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java index 8c274e2822d..c0a13ca4bad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent; @@ -22,7 +22,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -public class GetTransformsActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> { +public class GetTransformActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> { public void testInvalidTransforms() throws IOException { List<TransformConfig> transforms = new ArrayList<>(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java similarity index 87% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java index d685d28a5e0..c604c3a90ab 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Request; -public class GetTransformsStatsActionRequestTests extends AbstractWireSerializingTestCase<Request> { +public class GetTransformStatsActionRequestTests extends AbstractWireSerializingTestCase<Request> { @Override protected Request createTestInstance() { if (randomBoolean()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java similarity index 92% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java index ebb0fbf078a..df5142af058 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java @@ -10,14 +10,14 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStatsTests; import java.util.ArrayList; import java.util.List; -public class GetTransformsStatsActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> { +public class GetTransformStatsActionResponseTests extends AbstractWireSerializingTransformTestCase<Response> { @Override protected Response createTestInstance() { List<TransformStats> stats = new ArrayList<>(); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java index 3c433c48bdd..01abbbae7d7 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.integration; import org.elasticsearch.client.Request; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import
org.junit.Before; import java.io.IOException; @@ -63,15 +63,15 @@ public class TransformAuditorIT extends TransformRestTestCase { startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS); // Make sure we wrote to the audit - final Request request = new Request("GET", TransformInternalIndex.AUDIT_INDEX + "/_search"); + final Request request = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search"); request.setJsonEntity("{\"query\":{\"term\":{\"transform_id\":\"simple_pivot_for_audit\"}}}"); assertBusy(() -> { - assertTrue(indexExists(TransformInternalIndex.AUDIT_INDEX)); + assertTrue(indexExists(TransformInternalIndexConstants.AUDIT_INDEX)); }); // Since calls to write the AbstractAuditor are sent and forgot (async) we could have returned from the start, // finished the job (as this is a very short DF job), all without the audit being fully written. assertBusy(() -> { - refreshIndex(TransformInternalIndex.AUDIT_INDEX); + refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX); Map<String, Object> response = entityAsMap(client().performRequest(request)); List<?> hitList = ((List<?>) ((Map<?, ?>) response.get("hits")).get("hits")); assertThat(hitList, is(not(empty()))); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java index ad360c65816..4b4845ef52b 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; @@ -42,13 +42,13 @@ public class TransformConfigurationIndexIT extends TransformRestTestCase { builder.endObject(); final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); Request req = new Request("PUT", - TransformInternalIndex.LATEST_INDEX_NAME + "/_doc/" + TransformConfig.documentId(fakeTransformName)); + TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_doc/" + TransformConfig.documentId(fakeTransformName)); req.setEntity(entity); client().performRequest(req); } // refresh the index - assertOK(client().performRequest(new Request("POST", TransformInternalIndex.LATEST_INDEX_NAME + "/_refresh"))); + assertOK(client().performRequest(new Request("POST", TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_refresh"))); Request deleteRequest = new Request("DELETE", TRANSFORM_ENDPOINT + fakeTransformName); Response deleteResponse = client().performRequest(deleteRequest); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java
b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java index 03b396ef845..048175629da 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.SearchModule; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.transform.GetTransformRequest; import org.elasticsearch.client.transform.GetTransformResponse; @@ -42,8 +42,8 @@ import static org.hamcrest.Matchers.equalTo; public class TransformInternalIndexIT extends ESRestTestCase { - private static final String CURRENT_INDEX = TransformInternalIndex.LATEST_INDEX_NAME; - private static final String OLD_INDEX = TransformInternalIndex.INDEX_PATTERN + "1"; + private static final String CURRENT_INDEX = TransformInternalIndexConstants.LATEST_INDEX_NAME; + private static final String OLD_INDEX = TransformInternalIndexConstants.INDEX_PATTERN + "1"; public void testUpdateDeletesOldTransformConfig() throws Exception { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 372efddc57c..59d5fe1c1ce 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.After; import org.junit.AfterClass; @@ -384,7 +384,7 @@ public abstract class TransformRestTestCase extends ESRestTestCase { assertTrue(transformConfigs.isEmpty()); // the configuration index should be empty - Request request = new Request("GET", TransformInternalIndex.LATEST_INDEX_NAME + "/_search"); + Request request = new Request("GET", TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_search"); try { Response searchResponse = adminClient().performRequest(request); Map searchResult = entityAsMap(searchResponse); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java index 3c45bb34512..4c9b751a8e7 100644 --- 
a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.Before; import java.io.IOException; @@ -53,7 +53,7 @@ public class TransformUsageIT extends TransformRestTestCase { stopTransform("test_usage", false); Request statsExistsRequest = new Request("GET", - TransformInternalIndex.LATEST_INDEX_NAME+"/_search?q=" + + TransformInternalIndexConstants.LATEST_INDEX_NAME+"/_search?q=" + INDEX_DOC_TYPE.getPreferredName() + ":" + TransformStoredDoc.NAME); // Verify that we have one stat document @@ -95,7 +95,7 @@ public class TransformUsageIT extends TransformRestTestCase { XContentMapValues.extractValue("transform.stats." + statName, statsMap)); } // Refresh the index so that statistics are searchable - refreshIndex(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME); + refreshIndex(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); }, 60, TimeUnit.SECONDS); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 5c0a637dbed..269e8775064 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -45,16 +45,17 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.action.TransportDeleteTransformAction; -import org.elasticsearch.xpack.transform.action.TransportGetTransformsAction; -import org.elasticsearch.xpack.transform.action.TransportGetTransformsStatsAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; import 
org.elasticsearch.xpack.transform.action.TransportPutTransformAction; import org.elasticsearch.xpack.transform.action.TransportStartTransformAction; @@ -62,8 +63,8 @@ import org.elasticsearch.xpack.transform.action.TransportStopTransformAction; import org.elasticsearch.xpack.transform.action.TransportUpdateTransformAction; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.xpack.transform.rest.action.RestDeleteTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformStatsAction; @@ -155,8 +156,8 @@ public class Transform extends Plugin implements ActionPlugin, PersistentTaskPlu new ActionHandler<>(StartTransformAction.INSTANCE, TransportStartTransformAction.class), new ActionHandler<>(StopTransformAction.INSTANCE, TransportStopTransformAction.class), new ActionHandler<>(DeleteTransformAction.INSTANCE, TransportDeleteTransformAction.class), - new ActionHandler<>(GetTransformsAction.INSTANCE, TransportGetTransformsAction.class), - new ActionHandler<>(GetTransformsStatsAction.INSTANCE, TransportGetTransformsStatsAction.class), + new ActionHandler<>(GetTransformAction.INSTANCE, TransportGetTransformAction.class), + new ActionHandler<>(GetTransformStatsAction.INSTANCE, TransportGetTransformStatsAction.class), new ActionHandler<>(PreviewTransformAction.INSTANCE, TransportPreviewTransformAction.class), new ActionHandler<>(UpdateTransformAction.INSTANCE, TransportUpdateTransformAction.class) ); @@ -194,12 +195,13 @@ public class Transform extends Plugin implements ActionPlugin, PersistentTaskPlu public UnaryOperator> getIndexTemplateMetaDataUpgrader() { return templates -> { try { - templates.put(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); + templates.put(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, + TransformInternalIndex.getIndexTemplateMetaData()); } catch (IOException e) { logger.error("Error creating data frame index template", e); } try { - templates.put(TransformInternalIndex.AUDIT_INDEX, TransformInternalIndex.getAuditIndexTemplateMetaData()); + templates.put(TransformInternalIndexConstants.AUDIT_INDEX, TransformInternalIndex.getAuditIndexTemplateMetaData()); } catch (IOException e) { logger.warn("Error creating data frame audit index", e); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java index bb48c563ac4..c75ec9ed45c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformFeatureSet.java @@ -36,8 +36,8 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import 
org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import java.util.ArrayList; import java.util.Arrays; @@ -154,7 +154,7 @@ public class TransformFeatureSet implements XPackFeatureSet { } ); - SearchRequest totalTransformCount = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest totalTransformCount = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setTrackTotalHits(true) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformConfig.NAME)))) @@ -196,7 +196,7 @@ public class TransformFeatureSet implements XPackFeatureSet { .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformStoredDoc.NAME))); - SearchRequestBuilder requestBuilder = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequestBuilder requestBuilder = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setSize(0) .setQuery(queryBuilder); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java similarity index 81% rename from x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java rename to x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java index 9ffe5941ee5..6e6813aa22c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java @@ -24,23 +24,23 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.AbstractTransportGetResourcesAction; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Request; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import static org.elasticsearch.xpack.core.transform.TransformField.INDEX_DOC_TYPE; -public class TransportGetTransformsAction extends AbstractTransportGetResourcesAction { +public class TransportGetTransformAction extends AbstractTransportGetResourcesAction { @Inject - public TransportGetTransformsAction(TransportService transportService, ActionFilters actionFilters, - Client client, NamedXContentRegistry xContentRegistry) { - super(GetTransformsAction.NAME, transportService, actionFilters, 
Request::new, client, xContentRegistry); + public TransportGetTransformAction(TransportService transportService, ActionFilters actionFilters, + Client client, NamedXContentRegistry xContentRegistry) { + super(GetTransformAction.NAME, transportService, actionFilters, Request::new, client, xContentRegistry); } @Override @@ -58,7 +58,7 @@ public class TransportGetTransformsAction extends AbstractTransportGetResourcesA @Override protected String[] getIndices() { - return new String[]{TransformInternalIndex.INDEX_NAME_PATTERN}; + return new String[]{TransformInternalIndexConstants.INDEX_NAME_PATTERN}; } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java similarity index 94% rename from x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java rename to x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java index 257aa6b9fdf..474450e8d12 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java @@ -23,14 +23,14 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Request; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response; +import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; -import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; @@ -45,23 +45,23 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -public class TransportGetTransformsStatsAction extends +public class TransportGetTransformStatsAction extends TransportTasksAction { + GetTransformStatsAction.Request, + GetTransformStatsAction.Response, + GetTransformStatsAction.Response> { - private static final Logger logger = LogManager.getLogger(TransportGetTransformsStatsAction.class); + private static final Logger logger = LogManager.getLogger(TransportGetTransformStatsAction.class); private final TransformConfigManager transformConfigManager; private final TransformCheckpointService 
transformCheckpointService; @Inject - public TransportGetTransformsStatsAction(TransportService transportService, ActionFilters actionFilters, - ClusterService clusterService, - TransformConfigManager transformsConfigManager, - TransformCheckpointService transformsCheckpointService) { - super(GetTransformsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + public TransportGetTransformStatsAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, + TransformConfigManager transformsConfigManager, + TransformCheckpointService transformsCheckpointService) { + super(GetTransformStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, ThreadPool.Names.SAME); this.transformConfigManager = transformsConfigManager; this.transformCheckpointService = transformsCheckpointService; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index 0cbba5ef46d..777bbb4fc28 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Request import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; @@ -175,7 +175,7 @@ public class TransportStopTransformAction extends TransportTasksAction client.admin() .indices() - .prepareRefresh(TransformInternalIndex.LATEST_INDEX_NAME) + .prepareRefresh(TransformInternalIndexConstants.LATEST_INDEX_NAME) .execute(ActionListener.wrap( r -> listener.onResponse(waitResponse), e -> { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 055a3c02f8b..162616feffe 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.transform.notifications; import org.elasticsearch.client.Client; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; import org.elasticsearch.xpack.core.transform.notifications.TransformAuditMessage; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; @@ -18,6 +18,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; public class 
TransformAuditor extends AbstractAuditor { public TransformAuditor(Client client, String nodeName) { - super(client, nodeName, TransformInternalIndex.AUDIT_INDEX, TRANSFORM_ORIGIN, TransformAuditMessage::new); + super(client, nodeName, TransformInternalIndexConstants.AUDIT_INDEX, TRANSFORM_ORIGIN, TransformAuditMessage::new); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java index 4bb4cbadd6a..f2faf5085e4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.io.InputStream; @@ -111,7 +112,7 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = checkpoint.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .opType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformCheckpoint.documentId(checkpoint.getTransformId(), checkpoint.getCheckpoint())) @@ -150,7 +151,7 @@ public class TransformConfigManager { public void updateTransformConfiguration(TransformConfig transformConfig, SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ActionListener listener) { - if (seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndex.LATEST_INDEX_NAME)) { + if (seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) { // update the config in the same, current index using optimistic concurrency control putTransformConfiguration(transformConfig, DocWriteRequest.OpType.INDEX, seqNoPrimaryTermAndIndex, listener); } else { @@ -167,9 +168,9 @@ public class TransformConfigManager { * @param listener listener to alert on completion */ public void deleteOldTransformConfigurations(String transformId, ActionListener listener) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndex.INDEX_NAME_PATTERN) + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() - .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndex.LATEST_INDEX_NAME)) + .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndexConstants.LATEST_INDEX_NAME)) .filter(QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId))))) .setIndicesOptions(IndicesOptions.lenientExpandOpen()); @@ -194,9 +195,9 @@ public class TransformConfigManager { * @param listener listener to alert on completion */ public void deleteOldTransformStoredDocuments(String transformId, ActionListener 
listener) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndex.INDEX_NAME_PATTERN) + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() - .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndex.LATEST_INDEX_NAME)) + .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndexConstants.LATEST_INDEX_NAME)) .filter(QueryBuilders.termQuery("_id", TransformStoredDoc.documentId(transformId))))) .setIndicesOptions(IndicesOptions.lenientExpandOpen()); @@ -221,7 +222,7 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = transformConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .opType(optType) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformConfig.documentId(transformConfig.getId())) @@ -260,7 +261,7 @@ public class TransformConfigManager { */ public void getTransformCheckpoint(String transformId, long checkpoint, ActionListener resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformCheckpoint.documentId(transformId, checkpoint)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -289,7 +290,7 @@ public class TransformConfigManager { */ public void getTransformConfiguration(String transformId, ActionListener resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -320,7 +321,7 @@ public class TransformConfigManager { ActionListener> configAndVersionListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -361,7 +362,7 @@ public class TransformConfigManager { String[] idTokens = ExpandedIdsMatcher.tokenizeExpression(transformIdsExpression); QueryBuilder queryBuilder = buildQueryFromTokenizedIds(idTokens, TransformConfig.NAME); - SearchRequest request = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest request = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .addSort(TransformField.ID.getPreferredName(), SortOrder.ASC) .setFrom(pageParams.getFrom()) .setTrackTotalHits(true) @@ -412,7 +413,7 @@ public class TransformConfigManager { DeleteByQueryRequest request = new DeleteByQueryRequest() .setAbortOnVersionConflict(false); //since these documents are not updated, a conflict just means it was 
deleted previously - request.indices(TransformInternalIndex.INDEX_NAME_PATTERN); + request.indices(TransformInternalIndexConstants.INDEX_NAME_PATTERN); QueryBuilder query = QueryBuilders.termQuery(TransformField.ID.getPreferredName(), transformId); request.setQuery(query); request.setRefresh(true); @@ -440,12 +441,12 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = stats.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformStoredDoc.documentId(stats.getId())) .source(source); if (seqNoPrimaryTermAndIndex != null && - seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndex.LATEST_INDEX_NAME)) { + seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) { indexRequest.opType(DocWriteRequest.OpType.INDEX) .setIfSeqNo(seqNoPrimaryTermAndIndex.getSeqNo()) .setIfPrimaryTerm(seqNoPrimaryTermAndIndex.getPrimaryTerm()); @@ -471,7 +472,7 @@ public class TransformConfigManager { public void getTransformStoredDoc(String transformId, ActionListener> resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformStoredDoc.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -507,7 +508,7 @@ public class TransformConfigManager { .filter(QueryBuilders.termsQuery(TransformField.ID.getPreferredName(), transformIds)) .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformStoredDoc.NAME))); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .addSort(TransformField.ID.getPreferredName(), SortOrder.ASC) .addSort("_index", SortOrder.DESC) .setQuery(builder) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java index 9a2f50b1679..e0f033fa163 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.util.Collections; @@ -45,7 +46,7 @@ public final class TransformInternalIndex { /* Changelog of internal index versions * - * Please list changes, increase the version if you are 1st in this release cycle + * Please list changes, increase the 
version in @link{TransformInternalIndexConstants} if you are 1st in this release cycle * * version 1 (7.2): initial * version 2 (7.4): cleanup, add config::version, config::create_time, checkpoint::timestamp, checkpoint::time_upper_bound, @@ -54,17 +55,6 @@ public final class TransformInternalIndex { * stats::exponential_avg_documents_processed */ - // constants for the index - public static final String INDEX_VERSION = "2"; - public static final String INDEX_PATTERN = ".data-frame-internal-"; - public static final String LATEST_INDEX_VERSIONED_NAME = INDEX_PATTERN + INDEX_VERSION; - public static final String LATEST_INDEX_NAME = LATEST_INDEX_VERSIONED_NAME; - public static final String INDEX_NAME_PATTERN = INDEX_PATTERN + "*"; - - public static final String AUDIT_TEMPLATE_VERSION = "1"; - public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-"; - public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; - // constants for mappings public static final String DYNAMIC = "dynamic"; public static final String PROPERTIES = "properties"; @@ -82,8 +72,8 @@ public final class TransformInternalIndex { public static final String KEYWORD = "keyword"; public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException { - IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(LATEST_INDEX_VERSIONED_NAME) - .patterns(Collections.singletonList(LATEST_INDEX_VERSIONED_NAME)) + IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME) + .patterns(Collections.singletonList(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME)) .version(Version.CURRENT.id) .settings(Settings.builder() // the configurations are expected to be small @@ -96,8 +86,8 @@ public final class TransformInternalIndex { } public static IndexTemplateMetaData getAuditIndexTemplateMetaData() throws IOException { - IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(AUDIT_INDEX) - .patterns(Collections.singletonList(AUDIT_INDEX_PREFIX + "*")) + IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(TransformInternalIndexConstants.AUDIT_INDEX) + .patterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX + "*")) .version(Version.CURRENT.id) .settings(Settings.builder() // the audits are expected to be small @@ -319,7 +309,7 @@ public final class TransformInternalIndex { } public static boolean haveLatestVersionedIndexTemplate(ClusterState state) { - return state.getMetaData().getTemplates().containsKey(LATEST_INDEX_VERSIONED_NAME); + return state.getMetaData().getTemplates().containsKey(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); } /** @@ -345,7 +335,7 @@ public final class TransformInternalIndex { try { IndexTemplateMetaData indexTemplateMetaData = getIndexTemplateMetaData(); BytesReference jsonMappings = new BytesArray(indexTemplateMetaData.mappings().get(SINGLE_MAPPING_NAME).uncompressed()); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(LATEST_INDEX_VERSIONED_NAME) + PutIndexTemplateRequest request = new PutIndexTemplateRequest(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME) .patterns(indexTemplateMetaData.patterns()) .version(indexTemplateMetaData.version()) .settings(indexTemplateMetaData.settings()) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java index d5ddef41d0c..c329500b206 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import static org.elasticsearch.xpack.core.transform.TransformField.ALLOW_NO_MATCH; @@ -26,7 +26,7 @@ public class RestGetTransformAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - GetTransformsAction.Request request = new GetTransformsAction.Request(); + GetTransformAction.Request request = new GetTransformAction.Request(); String id = restRequest.param(TransformField.ID.getPreferredName()); request.setResourceId(id); @@ -36,7 +36,7 @@ public class RestGetTransformAction extends BaseRestHandler { new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); } - return channel -> client.execute(GetTransformsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(GetTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java index 7fc8d2ba656..61b8d60b3cc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import static org.elasticsearch.xpack.core.transform.TransformField.ALLOW_NO_MATCH; @@ -27,14 +27,14 @@ public class RestGetTransformStatsAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(TransformField.ID.getPreferredName()); - GetTransformsStatsAction.Request request = new GetTransformsStatsAction.Request(id); + GetTransformStatsAction.Request request = new GetTransformStatsAction.Request(id); request.setAllowNoMatch(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), true)); if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { request.setPageParams( new 
PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); } - return channel -> client.execute(GetTransformsStatsAction.INSTANCE, request, + return channel -> client.execute(GetTransformStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 5616e828dd2..64b299182d2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformState; @@ -108,7 +109,7 @@ public class TransformPersistentTasksExecutor extends PersistentTasksExecutor unavailableIndices = new ArrayList<>(indices.length); for (String index : indices) { IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java index 5cdbba8e122..f505ca85868 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.template.TemplateUtils; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.Before; import java.util.Collection; @@ -32,7 +32,7 @@ public abstract class TransformSingleNodeTestCase extends ESSingleNodeTestCase { assertBusy(() -> { ClusterState state = client().admin().cluster().prepareState().get().getState(); assertTrue("Timed out waiting for the transform templates to be installed", TemplateUtils - .checkTemplateExistsAndVersionIsGTECurrentVersion(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, state)); + .checkTemplateExistsAndVersionIsGTECurrentVersion(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, state)); }); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java index 
c4f1f94144c..cfc66532e65 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDocTests; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; import org.junit.Before; @@ -36,8 +37,8 @@ import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.transform.persistence.TransformInternalIndex.mappings; import static org.elasticsearch.xpack.transform.persistence.TransformConfigManager.TO_XCONTENT_PARAMS; +import static org.elasticsearch.xpack.transform.persistence.TransformInternalIndex.mappings; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -261,7 +262,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { String transformId = "transform_test_stored_doc_create_read_update"; TransformStoredDoc storedDocs = TransformStoredDocTests.randomTransformStoredDoc(transformId); - SeqNoPrimaryTermAndIndex firstIndex = new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndex.LATEST_INDEX_NAME); + SeqNoPrimaryTermAndIndex firstIndex = new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME); assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(storedDocs, null, listener), firstIndex, @@ -272,7 +273,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { null, null); - SeqNoPrimaryTermAndIndex secondIndex = new SeqNoPrimaryTermAndIndex(1, 1, TransformInternalIndex.LATEST_INDEX_NAME); + SeqNoPrimaryTermAndIndex secondIndex = new SeqNoPrimaryTermAndIndex(1, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME); TransformStoredDoc updated = TransformStoredDocTests.randomTransformStoredDoc(transformId); assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(updated, firstIndex, listener), secondIndex, @@ -297,7 +298,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { List expectedDocs = new ArrayList<>(); for (int i=0; i transformConfigManager.putTransformConfiguration(transformConfig, listener), true, null, null); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(true)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); assertAsync(listener -> transformConfigManager.deleteOldTransformConfigurations(transformId, listener), true, null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new 
GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(false)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); } public void testDeleteOldTransformStoredDocuments() throws Exception { - String oldIndex = TransformInternalIndex.INDEX_PATTERN + "1"; + String oldIndex = TransformInternalIndexConstants.INDEX_PATTERN + "1"; String transformId = "transform_test_delete_old_stored_documents"; String docId = TransformStoredDoc.documentId(transformId); TransformStoredDoc transformStoredDoc = TransformStoredDocTests @@ -369,22 +372,24 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(transformStoredDoc, new SeqNoPrimaryTermAndIndex(3, 1, oldIndex), listener), - new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndex.LATEST_INDEX_NAME), + new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME), null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(true)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); assertAsync(listener -> transformConfigManager.deleteOldTransformStoredDocuments(transformId, listener), true, null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(false)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java index 23249ca4fbf..83f9b36c496 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.io.UncheckedIOException; @@ -41,7 +42,7 @@ public class TransformInternalIndexTests extends ESTestCase { static { ImmutableOpenMap.Builder mapBuilder = 
ImmutableOpenMap.builder(); try { - mapBuilder.put(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); + mapBuilder.put(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index 2b51d0f28a2..c2105fa3eee 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -31,9 +31,9 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.xpack.transform.persistence.TransformInternalIndexTests; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; @@ -137,7 +137,7 @@ public class TransformPersistentTasksExecutorTests extends ESTestCase { metaData = new MetaData.Builder(cs.metaData()); routingTable = new RoutingTable.Builder(cs.routingTable()); - String indexToRemove = TransformInternalIndex.LATEST_INDEX_NAME; + String indexToRemove = TransformInternalIndexConstants.LATEST_INDEX_NAME; if (randomBoolean()) { routingTable.remove(indexToRemove); } else { @@ -159,8 +159,8 @@ public class TransformPersistentTasksExecutorTests extends ESTestCase { private void addIndices(MetaData.Builder metaData, RoutingTable.Builder routingTable) { List indices = new ArrayList<>(); - indices.add(TransformInternalIndex.AUDIT_INDEX); - indices.add(TransformInternalIndex.LATEST_INDEX_NAME); + indices.add(TransformInternalIndexConstants.AUDIT_INDEX); + indices.add(TransformInternalIndexConstants.LATEST_INDEX_NAME); for (String indexName : indices) { IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); indexMetaData.settings(Settings.builder() From b1bf05bb8985cd6afcc03483678abfd981d3eb13 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 25 Sep 2019 09:18:44 +0200 Subject: [PATCH 36/94] Add blob container retries tests for Azure SDK client (#47032) Similarly to what has been done for S3 and GCS, this commit adds unit tests that verify the retry logic of the Azure SDK client implementation when the remote service returns errors. These tests cover only the error case, not timeouts, because the Azure client's timeout options are not exposed as settings.
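Like the earlier S3 and GCS retry tests, each test here registers a handler on a local HTTP server that fails a request a fixed number of times before finally answering successfully, then asserts that the SDK client retried through all of the failures. The standalone sketch below illustrates that core idea using only the JDK's built-in HttpServer, with a plain HttpURLConnection retry loop standing in for the Azure SDK client; the class name, endpoint path, and retry counts are illustrative and not part of this patch.

    import com.sun.net.httpserver.HttpServer;

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.atomic.AtomicInteger;

    public class RetryPatternSketch {
        public static void main(String[] args) throws Exception {
            // Local server that fails the first two requests with a 500, then succeeds.
            HttpServer server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
            AtomicInteger failuresLeft = new AtomicInteger(2);
            server.createContext("/blob", exchange -> {
                if (failuresLeft.getAndDecrement() > 0) {
                    exchange.sendResponseHeaders(500, -1); // transient server error, no body
                } else {
                    byte[] body = "ok".getBytes(StandardCharsets.UTF_8);
                    exchange.sendResponseHeaders(200, body.length);
                    exchange.getResponseBody().write(body);
                }
                exchange.close();
            });
            server.start();
            try {
                URL url = new URL("http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort() + "/blob");
                int maxRetries = 3;
                for (int attempt = 0; ; attempt++) {
                    // A real test lets the SDK client retry internally; here we retry by hand.
                    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
                    if (connection.getResponseCode() == 200) {
                        try (InputStream in = connection.getInputStream()) {
                            System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8)); // prints "ok"
                        }
                        break; // succeeded once the simulated failures were exhausted
                    }
                    connection.disconnect();
                    if (attempt >= maxRetries) {
                        throw new IOException("giving up after " + attempt + " retries");
                    }
                }
            } finally {
                server.stop(0);
            }
        }
    }

The tests in this patch apply the same countdown idea to the HEAD, GET, and PUT requests issued by the Azure SDK, with randomized retry counts and randomized 5xx error codes.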
--- .../azure/AzureStorageSettings.java | 2 +- .../azure/AzureBlobContainerRetriesTests.java | 315 ++++++++++++++++++ 2 files changed, 316 insertions(+), 1 deletion(-) create mode 100644 plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index 31353176e64..5d1e8d57ae2 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -59,7 +59,7 @@ final class AzureStorageSettings { key -> SecureSetting.secureString(key, null)); /** max_retries: Number of retries in case of Azure errors. Defaults to 3 (RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT). */ - public static final Setting MAX_RETRIES_SETTING = + public static final AffixSetting MAX_RETRIES_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "max_retries", (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java new file mode 100644 index 00000000000..c5fe0c5e72f --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -0,0 +1,315 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.Constants; +import com.microsoft.azure.storage.RetryExponentialRetry; +import com.microsoft.azure.storage.RetryPolicyFactory; +import com.microsoft.azure.storage.blob.BlobRequestOptions; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpServer; +import org.apache.http.HttpStatus; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; +import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.file.NoSuchFileException; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.azure.AzureRepository.Repository.CONTAINER_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.ACCOUNT_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.ENDPOINT_SUFFIX_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.KEY_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.MAX_RETRIES_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.TIMEOUT_SETTING; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; + +/** + * This class tests how a {@link AzureBlobContainer} and its underlying SDK client are retrying requests when reading or writing blobs. 
+ */ +@SuppressForbidden(reason = "use a http server") +public class AzureBlobContainerRetriesTests extends ESTestCase { + + private HttpServer httpServer; + private ThreadPool threadPool; + + @Before + public void setUp() throws Exception { + threadPool = new TestThreadPool(getTestClass().getName(), AzureRepositoryPlugin.executorBuilder()); + httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + super.setUp(); + } + + @After + public void tearDown() throws Exception { + httpServer.stop(0); + super.tearDown(); + ThreadPool.terminate(threadPool, 10L, TimeUnit.SECONDS); + } + + private BlobContainer createBlobContainer(final int maxRetries) { + final Settings.Builder clientSettings = Settings.builder(); + final String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + + final InetSocketAddress address = httpServer.getAddress(); + final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://" + + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); + clientSettings.put(ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); + clientSettings.put(MAX_RETRIES_SETTING.getConcreteSettingForNamespace(clientName).getKey(), maxRetries); + clientSettings.put(TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), TimeValue.timeValueMillis(500)); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ACCOUNT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), "account"); + final String key = Base64.getEncoder().encodeToString(randomAlphaOfLength(10).getBytes(UTF_8)); + secureSettings.setString(KEY_SETTING.getConcreteSettingForNamespace(clientName).getKey(), key); + clientSettings.setSecureSettings(secureSettings); + + final AzureStorageService service = new AzureStorageService(clientSettings.build()) { + @Override + RetryPolicyFactory createRetryPolicy(final AzureStorageSettings azureStorageSettings) { + return new RetryExponentialRetry(1, 100, 500, azureStorageSettings.getMaxRetries()); + } + + @Override + BlobRequestOptions getBlobRequestOptionsForWriteBlob() { + BlobRequestOptions options = new BlobRequestOptions(); + options.setSingleBlobPutThresholdInBytes(Math.toIntExact(ByteSizeUnit.MB.toBytes(1))); + return options; + } + }; + + final RepositoryMetaData repositoryMetaData = new RepositoryMetaData("repository", AzureRepository.TYPE, + Settings.builder() + .put(CONTAINER_SETTING.getKey(), "container") + .put(ACCOUNT_SETTING.getKey(), clientName) + .build()); + + return new AzureBlobContainer(BlobPath.cleanPath(), new AzureBlobStore(repositoryMetaData, service, threadPool), threadPool); + } + + public void testReadNonexistentBlobThrowsNoSuchFileException() { + final BlobContainer blobContainer = createBlobContainer(between(1, 5)); + final Exception exception = expectThrows(NoSuchFileException.class, () -> blobContainer.readBlob("read_nonexistent_blob")); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("not found")); + } + + public void testReadBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + final CountDown countDownHead = new CountDown(maxRetries); + final CountDown countDownGet = new CountDown(maxRetries); + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/container/read_blob_max_retries", exchange -> { + Streams.readFully(exchange.getRequestBody()); + if 
("HEAD".equals(exchange.getRequestMethod())) { + if (countDownHead.countDown()) { + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(bytes.length)); + exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, -1); + exchange.close(); + return; + } + } else if ("GET".equals(exchange.getRequestMethod())) { + if (countDownGet.countDown()) { + final int rangeStart = getRangeStart(exchange); + assertThat(rangeStart, lessThan(bytes.length)); + final int length = bytes.length - rangeStart; + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(length)); + exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, length); + exchange.getResponseBody().write(bytes, rangeStart, length); + exchange.close(); + return; + } + } + if (randomBoolean()) { + exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, + HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); + assertThat(countDownHead.isCountedDown(), is(true)); + assertThat(countDownGet.isCountedDown(), is(true)); + } + } + + public void testWriteBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/container/write_blob_max_retries", exchange -> { + if ("PUT".equals(exchange.getRequestMethod())) { + if (countDown.countDown()) { + final BytesReference body = Streams.readFully(exchange.getRequestBody()); + if (Objects.deepEquals(bytes, BytesReference.toBytes(body))) { + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + } else { + exchange.sendResponseHeaders(HttpStatus.SC_BAD_REQUEST, -1); + } + exchange.close(); + return; + } + + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, Math.max(1, bytes.length - 1))]); + } else { + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, + HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + } + } + exchange.close(); + } + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + } + assertThat(countDown.isCountedDown(), is(true)); + } + + public void testWriteLargeBlob() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + + final int nbBlocks = randomIntBetween(1, 2); + final byte[] data = randomBytes(Constants.DEFAULT_STREAM_WRITE_IN_BYTES * nbBlocks); + + final int nbErrors = 2; // we want all requests to fail at least once + final AtomicInteger countDownUploads = new AtomicInteger(nbErrors * nbBlocks); + final CountDown countDownComplete = new 
CountDown(nbErrors); + + final Map<String, BytesReference> blocks = new ConcurrentHashMap<>(); + httpServer.createContext("/container/write_large_blob", exchange -> { + + if ("PUT".equals(exchange.getRequestMethod())) { + final Map<String, String> params = new HashMap<>(); + RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + + final String blockId = params.get("blockid"); + if (Strings.hasText(blockId) && (countDownUploads.decrementAndGet() % 2 == 0)) { + blocks.put(blockId, Streams.readFully(exchange.getRequestBody())); + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + exchange.close(); + return; + } + + final String complete = params.get("comp"); + if ("blocklist".equals(complete) && (countDownComplete.countDown())) { + final String blockList = Streams.copyToString(new InputStreamReader(exchange.getRequestBody(), UTF_8)); + final List<String> blockUids = Arrays.stream(blockList.split("<Latest>")) + .filter(line -> line.contains("</Latest>")) + .map(line -> line.substring(0, line.indexOf("</Latest>"))) + .collect(Collectors.toList()); + + final ByteArrayOutputStream blob = new ByteArrayOutputStream(); + for (String blockUid : blockUids) { + BytesReference block = blocks.remove(blockUid); + assert block != null; + block.writeTo(blob); + } + assertArrayEquals(data, blob.toByteArray()); + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + exchange.close(); + return; + } + } + + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, + HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { + blobContainer.writeBlob("write_large_blob", stream, data.length * nbBlocks, false); + } + assertThat(countDownUploads.get(), equalTo(0)); + assertThat(countDownComplete.isCountedDown(), is(true)); + assertThat(blocks.isEmpty(), is(true)); + } + + private static byte[] randomBlobContent() { + return randomByteArrayOfLength(randomIntBetween(1, frequently() ?
512 : 1 << 20)); // rarely up to 1mb + } + + private static int getRangeStart(final HttpExchange exchange) { + final String rangeHeader = exchange.getRequestHeaders().getFirst("X-ms-range"); + if (rangeHeader == null) { + return 0; + } + + final Matcher matcher = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$").matcher(rangeHeader); + assertTrue(rangeHeader + " matches expected pattern", matcher.matches()); + return Math.toIntExact(Long.parseLong(matcher.group(1))); + } +} From db63e78b68c6638d92ce181f7f900d2ac5342326 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 09:01:15 +0200 Subject: [PATCH 37/94] Mute DebMetadataTests.test05CheckLintian Relates #46903 --- .../java/org/elasticsearch/packaging/test/DebMetadataTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index 0a291a9c40d..d484e92a302 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.FileUtils; import org.elasticsearch.packaging.util.Shell; import org.junit.Before; +import org.junit.Ignore; import java.util.regex.Pattern; @@ -37,6 +38,7 @@ public class DebMetadataTests extends PackagingTestCase { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); } + @Ignore public void test05CheckLintian() { sh.run("lintian --fail-on-warnings " + FileUtils.getDistributionFile(distribution())); } From 0c187e0a103df6c11efcc395caa5e4f3fa6bea80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 25 Sep 2019 10:15:10 +0200 Subject: [PATCH 38/94] Add migration tool checks for `_field_names` disabling (#46972) This change adds a check to the migration tool that warns about the deprecated `enabled` setting for the `_field_names` field on 7.x indices and issues a warning for templates containing this setting, which has been removed with 8.0. 
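For illustration, a minimal sketch of what the new check flags (hypothetical usage of the
package-private helper added in this change; java.util imports assumed):

    Map<String, Object> mapping = new HashMap<>();
    mapping.put("_field_names", Collections.singletonMap("enabled", false)); // any explicit value is flagged
    assert ClusterDeprecationChecks.mapContainsFieldNamesDisabled(mapping);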
Relates to #42854, #46681 --- .../deprecation/ClusterDeprecationChecks.java | 51 ++++++++++++ .../xpack/deprecation/DeprecationChecks.java | 6 +- .../deprecation/IndexDeprecationChecks.java | 16 ++++ .../ClusterDeprecationChecksTests.java | 79 +++++++++++++++++++ .../IndexDeprecationChecksTests.java | 32 +++++++- 5 files changed, 180 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java index ff79704136e..ed960df9f6f 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java @@ -12,15 +12,19 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -91,6 +95,53 @@ public class ClusterDeprecationChecks { return null; } + /** + * Check templates that use `_field_names` explicitly, which was deprecated in https://github.com/elastic/elasticsearch/pull/42854 + * and will throw an error on new indices in 8.0 + */ + @SuppressWarnings("unchecked") + static DeprecationIssue checkTemplatesWithFieldNamesDisabled(ClusterState state) { + Set templatesContainingFieldNames = new HashSet<>(); + state.getMetaData().getTemplates().forEach((templateCursor) -> { + String templateName = templateCursor.key; + templateCursor.value.getMappings().forEach((mappingCursor) -> { + String type = mappingCursor.key; + // there should be the type name at this level, but there was a bug where mappings could be stored without a type (#45120) + // to make sure, we try to detect this like we try to do in MappingMetaData#sourceAsMap() + Map mapping = XContentHelper.convertToMap(mappingCursor.value.compressedReference(), true).v2(); + if (mapping.size() == 1 && mapping.containsKey(type)) { + // the type name is the root value, reduce it + mapping = (Map) mapping.get(type); + } + if (mapContainsFieldNamesDisabled(mapping)) { + templatesContainingFieldNames.add(templateName); + } + }); + }); + + if (templatesContainingFieldNames.isEmpty() == false) { + return new DeprecationIssue(DeprecationIssue.Level.WARNING, "Index templates contain _field_names settings.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-8.0.html#fieldnames-enabling", + "Index templates " + templatesContainingFieldNames + " use the deprecated `enable` setting for the `" + + FieldNamesFieldMapper.NAME + "` field. 
Using this setting in new index mappings will throw an error " + + "in the next major version and needs to be removed from existing mappings and templates."); + } + return null; + } + + /** + * check whether the map contains a "_field_names" entry whose sub-map has an "enabled" property + */ + static boolean mapContainsFieldNamesDisabled(Map<String, Object> map) { + Object fieldNamesMapping = map.get(FieldNamesFieldMapper.NAME); + if (fieldNamesMapping != null) { + if (((Map<?, ?>) fieldNamesMapping).keySet().contains("enabled")) { + return true; + } + } + return false; + } + static DeprecationIssue checkPollIntervalTooLow(ClusterState state) { String pollIntervalString = state.metaData().settings().get(LIFECYCLE_POLL_INTERVAL_SETTING.getKey()); if (Strings.isNullOrEmpty(pollIntervalString)) { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 0ddbcac6109..c555aeb7830 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -35,7 +35,8 @@ public class DeprecationChecks { Collections.unmodifiableList(Arrays.asList( ClusterDeprecationChecks::checkUserAgentPipelines, ClusterDeprecationChecks::checkTemplatesWithTooManyFields, - ClusterDeprecationChecks::checkPollIntervalTooLow + ClusterDeprecationChecks::checkPollIntervalTooLow, + ClusterDeprecationChecks::checkTemplatesWithFieldNamesDisabled )); @@ -51,7 +52,8 @@ public class DeprecationChecks { IndexDeprecationChecks::tooManyFieldsCheck, IndexDeprecationChecks::chainedMultiFieldsCheck, IndexDeprecationChecks::deprecatedDateTimeFormat, - IndexDeprecationChecks::translogRetentionSettingCheck + IndexDeprecationChecks::translogRetentionSettingCheck, + IndexDeprecationChecks::fieldNamesDisabledCheck )); static List> ML_SETTINGS_CHECKS = diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 38a0d0ad5cc..06e8e9c33a3 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.deprecation; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -185,6 +186,21 @@ public class IndexDeprecationChecks { return false; } + /** + * warn about explicit "_field_names" settings in existing mappings + */ + static DeprecationIssue fieldNamesDisabledCheck(IndexMetaData indexMetaData) { + MappingMetaData mapping = indexMetaData.mapping(); + if ((mapping != null) && ClusterDeprecationChecks.mapContainsFieldNamesDisabled(mapping.getSourceAsMap())) { + return new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Index mapping contains explicit `_field_names` enabling settings.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-8.0.html" + + "#fieldnames-enabling", + "The index mapping contains a deprecated `enabled` setting for `_field_names` that should be removed moving forward."); +
} + return null; + } + private static final Set TYPES_THAT_DONT_COUNT; static { HashSet typesThatDontCount = new HashSet<>(); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecksTests.java index 2432b452cf2..46bd7edb93b 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecksTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -158,6 +159,84 @@ public class ClusterDeprecationChecksTests extends ESTestCase { assertEquals(singletonList(expected), issues); } + public void testTemplatesWithFieldNamesDisabled() throws IOException { + XContentBuilder goodMappingBuilder = jsonBuilder(); + goodMappingBuilder.startObject(); + { + goodMappingBuilder.startObject("_doc"); + { + goodMappingBuilder.startObject("properties"); + { + addRandomFields(10, goodMappingBuilder); + } + goodMappingBuilder.endObject(); + } + goodMappingBuilder.endObject(); + } + goodMappingBuilder.endObject(); + assertFieldNamesEnabledTemplate(goodMappingBuilder, false); + + XContentBuilder badMappingBuilder = jsonBuilder(); + badMappingBuilder.startObject(); + { + // we currently always store a type level internally + badMappingBuilder.startObject("_doc"); + { + badMappingBuilder.startObject(FieldNamesFieldMapper.NAME); + { + badMappingBuilder.field("enabled", randomBoolean()); + } + badMappingBuilder.endObject(); + } + badMappingBuilder.endObject(); + } + badMappingBuilder.endObject(); + assertFieldNamesEnabledTemplate(badMappingBuilder, true); + + // however, there was a bug where mappings could be stored without a type (#45120) + // so we also should try to check these cases + + XContentBuilder badMappingWithoutTypeBuilder = jsonBuilder(); + badMappingWithoutTypeBuilder.startObject(); + { + badMappingWithoutTypeBuilder.startObject(FieldNamesFieldMapper.NAME); + { + badMappingWithoutTypeBuilder.field("enabled", randomBoolean()); + } + badMappingWithoutTypeBuilder.endObject(); + } + badMappingWithoutTypeBuilder.endObject(); + assertFieldNamesEnabledTemplate(badMappingWithoutTypeBuilder, true); + } + + private void assertFieldNamesEnabledTemplate(XContentBuilder templateBuilder, boolean expectIssue) throws IOException { + String badTemplateName = randomAlphaOfLength(5); + final ClusterState state = ClusterState.builder(new ClusterName(randomAlphaOfLength(5))) + .metaData(MetaData.builder() + .put(IndexTemplateMetaData.builder(badTemplateName) + .patterns(Collections.singletonList(randomAlphaOfLength(5))) + .putMapping("_doc", Strings.toString(templateBuilder)) + .build()) + .build()) + .build(); + + List issues = DeprecationChecks.filterChecks(CLUSTER_SETTINGS_CHECKS, c -> c.apply(state)); + if (expectIssue) { + assertEquals(1, issues.size()); + DeprecationIssue issue = issues.get(0); + assertEquals(DeprecationIssue.Level.WARNING, issue.getLevel()); + 
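// the issue must point at the 8.0 breaking-changes docs and name the offending templates +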
assertEquals("https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-8.0.html#fieldnames-enabling" + , issue.getUrl()); + assertEquals("Index templates contain _field_names settings.", issue.getMessage()); + assertEquals("Index templates [" + badTemplateName + "] " + + "use the deprecated `enable` setting for the `" + FieldNamesFieldMapper.NAME + + "` field. Using this setting in new index mappings will throw an error in the next major version and " + + "needs to be removed from existing mappings and templates.", issue.getDetails()); + } else { + assertTrue(issues.isEmpty()); + } + } + public void testPollIntervalTooLow() { { final String tooLowInterval = randomTimeValue(1, 999, "ms", "micros", "nanos"); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index e32e24aeafb..6ad322b6cec 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -11,10 +11,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.joda.JodaDeprecationPatterns; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -27,8 +28,8 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.singletonList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; -import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; public class IndexDeprecationChecksTests extends ESTestCase { @@ -161,6 +162,7 @@ public class IndexDeprecationChecksTests extends ESTestCase { "The names of fields that contain chained multi-fields: [[type: _doc, field: invalid-field]]"); assertEquals(singletonList(expected), issues); } + public void testDefinedPatternsDoNotWarn() throws IOException { String simpleMapping = "{\n" + "\"properties\" : {\n" + @@ -412,4 +414,30 @@ public class IndexDeprecationChecksTests extends ESTestCase { List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertThat(issues, empty()); } + + public void testFieldNamesEnabling() throws IOException { + XContentBuilder xContent = XContentFactory.jsonBuilder().startObject() + .startObject(FieldNamesFieldMapper.NAME) + .field("enabled", randomBoolean()) + .endObject() + .endObject(); + String mapping = BytesReference.bytes(xContent).utf8ToString(); + + IndexMetaData simpleIndex = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 10)) + .settings(settings( + VersionUtils.randomVersionBetween(random(), 
Version.V_7_0_0, Version.CURRENT))) + .numberOfShards(1) + .numberOfReplicas(0) + .putMapping("_doc", mapping).build(); + List<DeprecationIssue> issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(simpleIndex)); + assertEquals(1, issues.size()); + + DeprecationIssue issue = issues.get(0); + assertEquals(DeprecationIssue.Level.WARNING, issue.getLevel()); + assertEquals("https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-8.0.html#fieldnames-enabling" + , issue.getUrl()); + assertEquals("Index mapping contains explicit `_field_names` enabling settings.", issue.getMessage()); + assertEquals("The index mapping contains a deprecated `enabled` setting for `_field_names` that should be removed moving forward.", + issue.getDetails()); + } } From 48df560593a5f0dcf039a0ea2772c60eccbaa2ae Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 25 Sep 2019 10:22:46 +0200 Subject: [PATCH 39/94] Emit log message when parent circuit breaker trips (#47000) (#47073) We emit a debug log message whenever a child circuit breaker trips (in `ChildMemoryCircuitBreaker#circuitBreak(String, long)`) but we never emit a log message when the parent circuit breaker trips. As this is more likely to happen with the real memory circuit breaker, it is not possible to detect this in the logs. With this commit we add a log message on the same log level (debug) when the parent circuit breaker trips. --- .../indices/breaker/HierarchyCircuitBreakerService.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 5797843161c..536d0b15b5f 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -339,6 +339,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { // child circuit breakers is categorized as transient or permanent. CircuitBreaker.Durability durability = memoryUsed.transientChildUsage >= memoryUsed.permanentChildUsage ?
CircuitBreaker.Durability.TRANSIENT : CircuitBreaker.Durability.PERMANENT; + logger.debug("{}", message); throw new CircuitBreakingException(message.toString(), memoryUsed.totalUsage, parentLimit, durability); } } From 056ac32738fc94ef1787ae2b718402782b81a701 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 10:34:54 +0200 Subject: [PATCH 40/94] Mute JdbcCsvSpecIT.testAverageWithOneValueAndLimit Relates to #47080 --- .../plugin/sql/qa/src/main/resources/pivot.csv-spec | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec index 8858187d000..ae761b6432e 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec @@ -82,18 +82,6 @@ null |48396.28571428572|62140.666666666664 5 |39052.875 |46705.555555555555 ; - -averageWithOneValueAndLimit -schema::languages:bt|'F':d -SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) LIMIT 3; - - languages | 'F' ----------------+------------------ -null |62140.666666666664 -1 |47073.25 -2 |50684.4 -; - averageWithTwoValuesAndLimit schema::languages:bt|'M':d|'F':d SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 3; From 6f453aa6b2eae90a72014125c3ef178a00f40e77 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Wed, 25 Sep 2019 18:57:11 +1000 Subject: [PATCH 41/94] Validate index and cluster privilege names when creating a role (#46361) (#47063) This commit adds validation so a role cannot be created with invalid index or cluster privilege name. Closes #29703 --- .../security/action/role/PutRoleRequest.java | 21 +++++++++++ .../privilege/ClusterPrivilegeResolver.java | 10 ++++-- .../authz/privilege/IndexPrivilege.java | 13 ++++--- .../action/role/PutRoleRequestTests.java | 36 +++++++++++++++++++ 4 files changed, 74 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java index 3c310deabd9..19f76b5104e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java @@ -14,10 +14,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import java.io.IOException; @@ -70,6 +73,24 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest ALL_SECURITY_PATTERN = 
Collections.singleton("cluster:admin/xpack/security/*"); private static final Set MANAGE_SAML_PATTERN = Collections.unmodifiableSet( @@ -168,10 +172,12 @@ public class ClusterPrivilegeResolver { if (fixedPrivilege != null) { return fixedPrivilege; } - throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. a privilege must be either " + + String errorMessage = "unknown cluster privilege [" + name + "]. a privilege must be either " + "one of the predefined cluster privilege names [" + Strings.collectionToCommaDelimitedString(VALUES.keySet()) + "] or a pattern over one of the available " + - "cluster actions"); + "cluster actions"; + logger.debug(errorMessage); + throw new IllegalArgumentException(errorMessage); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 715558f6940..3448fe7509c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; @@ -41,6 +43,7 @@ import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndMinimize; public final class IndexPrivilege extends Privilege { + private static final Logger logger = LogManager.getLogger(IndexPrivilege.class); private static final Automaton ALL_AUTOMATON = patterns("indices:*", "internal:transport/proxy/indices:*"); private static final Automaton READ_AUTOMATON = patterns("indices:data/read/*"); @@ -144,10 +147,12 @@ public final class IndexPrivilege extends Privilege { } else if (indexPrivilege != null) { automata.add(indexPrivilege.automaton); } else { - throw new IllegalArgumentException("unknown index privilege [" + part + "]. a privilege must be either " + - "one of the predefined fixed indices privileges [" + - Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available index" + - " actions"); + String errorMessage = "unknown index privilege [" + part + "]. 
a privilege must be either " + + "one of the predefined fixed indices privileges [" + + Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available index" + + " actions"; + logger.debug(errorMessage); + throw new IllegalArgumentException(errorMessage); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index 731109c523b..e3e065ec705 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -41,6 +41,42 @@ import static org.hamcrest.Matchers.nullValue; public class PutRoleRequestTests extends ESTestCase { + public void testValidationErrorWithUnknownClusterPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + String unknownClusterPrivilegeName = "unknown_" + randomAlphaOfLengthBetween(3,9); + request.cluster("manage_security", unknownClusterPrivilegeName); + + // Fail + assertValidationError("unknown cluster privilege [" + unknownClusterPrivilegeName.toLowerCase(Locale.ROOT) + "]", request); + } + + public void testValidationSuccessWithCorrectClusterPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + request.cluster("manage_security", "manage", "cluster:admin/xpack/security/*"); + assertSuccessfulValidation(request); + } + + public void testValidationErrorWithUnknownIndexPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + String unknownIndexPrivilegeName = "unknown_" + randomAlphaOfLengthBetween(3,9); + request.addIndex(new String[]{randomAlphaOfLength(5)}, new String[]{"index", unknownIndexPrivilegeName}, null, + null, null, randomBoolean()); + + // Fail + assertValidationError("unknown index privilege [" + unknownIndexPrivilegeName.toLowerCase(Locale.ROOT) + "]", request); + } + + public void testValidationSuccessWithCorrectIndexPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + request.addIndex(new String[]{randomAlphaOfLength(5)}, new String[]{"index", "write", "indices:data/read"}, null, + null, null, randomBoolean()); + assertSuccessfulValidation(request); + } + public void testValidationOfApplicationPrivileges() { assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"read"}, new String[]{"*"})); assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"action:login"}, new String[]{"/"})); From b234d2cbd778ec9c49d8ec8f7a37753653c8037c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Krze=C5=9Bniak?= Date: Wed, 25 Sep 2019 11:23:52 +0200 Subject: [PATCH 42/94] =?UTF-8?q?[Docs]=C2=A0Fix=20the=20numbering=20in=20?= =?UTF-8?q?the=20annotated=5Ftext=20example=20(#47077)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/plugins/mapper-annotated-text.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc index e6d1628b717..8cac0aec708 100644 --- 
a/docs/plugins/mapper-annotated-text.asciidoc +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -114,12 +114,12 @@ in this example where a search for `Beck` will not match `Jeff Beck` : # Example documents PUT my_index/_doc/1 { - "my_field": "[Beck](Beck) announced a new tour"<2> + "my_field": "[Beck](Beck) announced a new tour"<1> } PUT my_index/_doc/2 { - "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<1> + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2> } # Example search From eef1ba3fadfa3adae2be7280a97f1812a39c1f1a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 25 Sep 2019 10:35:36 +0200 Subject: [PATCH 43/94] Make ingest pipeline resolution logic unit testable (#47026) Extracted ingest pipeline resolution logic into a static method and added unit tests for pipeline resolution logic. Followup from #46847 --- .../action/bulk/TransportBulkAction.java | 215 +++++++++--------- .../action/bulk/TransportBulkActionTests.java | 157 +++++++++++++ 2 files changed, 266 insertions(+), 106 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 94dca0d2167..0745e3b8d4d 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -54,7 +54,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -159,115 +158,13 @@ public class TransportBulkAction extends HandledTransportAction indicesMetaData = metaData.indices(); for (DocWriteRequest actionRequest : bulkRequest.requests) { IndexRequest indexRequest = getIndexWriteRequest(actionRequest); - if (indexRequest != null) { - if (indexRequest.isPipelineResolved() == false) { - final String requestPipeline = indexRequest.getPipeline(); - indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); - boolean requestCanOverridePipeline = true; - String requiredPipeline = null; - // start to look for default or required pipelines via settings found in the index meta data - IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index()); - // check the alias for the index request (this is how normal index requests are modeled) - if (indexMetaData == null && indexRequest.index() != null) { - AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); - if (indexOrAlias != null && indexOrAlias.isAlias()) { - AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; - indexMetaData = alias.getWriteIndex(); - } - } - // check the alias for the action request (this is how upserts are modeled) - if (indexMetaData == null && actionRequest.index() != null) { - AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(actionRequest.index()); - if (indexOrAlias != null && indexOrAlias.isAlias()) { - AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; - indexMetaData = alias.getWriteIndex(); - } - } - if (indexMetaData != null) { - final Settings indexSettings = indexMetaData.getSettings(); - if 
(IndexSettings.REQUIRED_PIPELINE.exists(indexSettings)) { - // find the required pipeline if one is defined from an existing index - requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(indexSettings); - assert IndexSettings.DEFAULT_PIPELINE.get(indexSettings).equals(IngestService.NOOP_PIPELINE_NAME) : - IndexSettings.DEFAULT_PIPELINE.get(indexSettings); - indexRequest.setPipeline(requiredPipeline); - requestCanOverridePipeline = false; - } else { - // find the default pipeline if one is defined from an existing index - String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexSettings); - indexRequest.setPipeline(defaultPipeline); - } - } else if (indexRequest.index() != null) { - // the index does not exist yet (and is valid request), so match index templates to look for a default pipeline - List templates = MetaDataIndexTemplateService.findTemplates(metaData, indexRequest.index()); - assert (templates != null); - // order of templates are highest order first, we have to iterate through them all though - String defaultPipeline = null; - for (IndexTemplateMetaData template : templates) { - final Settings settings = template.settings(); - if (requiredPipeline == null && IndexSettings.REQUIRED_PIPELINE.exists(settings)) { - requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(settings); - requestCanOverridePipeline = false; - // we can not break in case a lower-order template has a default pipeline that we need to reject - } else if (defaultPipeline == null && IndexSettings.DEFAULT_PIPELINE.exists(settings)) { - defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(settings); - // we can not break in case a lower-order template has a required pipeline that we need to reject - } - } - if (requiredPipeline != null && defaultPipeline != null) { - // we can not have picked up a required and a default pipeline from applying templates - final String message = String.format( - Locale.ROOT, - "required pipeline [%s] and default pipeline [%s] can not both be set", - requiredPipeline, - defaultPipeline); - throw new IllegalArgumentException(message); - } - final String pipeline; - if (requiredPipeline != null) { - pipeline = requiredPipeline; - } else { - pipeline = defaultPipeline != null ? defaultPipeline : IngestService.NOOP_PIPELINE_NAME; - } - indexRequest.setPipeline(pipeline); - } - - if (requestPipeline != null) { - if (requestCanOverridePipeline == false) { - final String message = String.format( - Locale.ROOT, - "request pipeline [%s] can not override required pipeline [%s]", - requestPipeline, - requiredPipeline); - throw new IllegalArgumentException(message); - } else { - indexRequest.setPipeline(requestPipeline); - } - } - - if (IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false) { - hasIndexRequestsWithPipelines = true; - } - /* - * We have to track whether or not the pipeline for this request has already been resolved. It can happen that the - * pipeline for this request has already been derived yet we execute this loop again. That occurs if the bulk request - * has been forwarded by a non-ingest coordinating node to an ingest node. In this case, the coordinating node will have - * already resolved the pipeline for this request. It is important that we are able to distinguish this situation as we - * can not double-resolve the pipeline because we will not be able to distinguish the case of the pipeline having been - * set from a request pipeline parameter versus having been set by the resolution. 
We need to be able to distinguish - * these cases as we need to reject the request if the pipeline was set by a required pipeline and there is a request - * pipeline parameter too. - */ - indexRequest.isPipelineResolved(true); - } else if (IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false) { - hasIndexRequestsWithPipelines = true; - } + // Each index request needs to be evaluated, because this method also modifies the IndexRequest + boolean indexRequestHasPipeline = resolveRequiredOrDefaultPipeline(actionRequest, indexRequest, metaData); + hasIndexRequestsWithPipelines |= indexRequestHasPipeline; } - } if (hasIndexRequestsWithPipelines) { @@ -363,6 +260,112 @@ public class TransportBulkAction extends HandledTransportAction originalRequest, + IndexRequest indexRequest, + MetaData metaData) { + + if (indexRequest.isPipelineResolved() == false) { + final String requestPipeline = indexRequest.getPipeline(); + indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); + boolean requestCanOverridePipeline = true; + String requiredPipeline = null; + // start to look for default or required pipelines via settings found in the index meta data + IndexMetaData indexMetaData = metaData.indices().get(originalRequest.index()); + // check the alias for the index request (this is how normal index requests are modeled) + if (indexMetaData == null && indexRequest.index() != null) { + AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); + if (indexOrAlias != null && indexOrAlias.isAlias()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; + indexMetaData = alias.getWriteIndex(); + } + } + // check the alias for the action request (this is how upserts are modeled) + if (indexMetaData == null && originalRequest.index() != null) { + AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(originalRequest.index()); + if (indexOrAlias != null && indexOrAlias.isAlias()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; + indexMetaData = alias.getWriteIndex(); + } + } + if (indexMetaData != null) { + final Settings indexSettings = indexMetaData.getSettings(); + if (IndexSettings.REQUIRED_PIPELINE.exists(indexSettings)) { + // find the required pipeline if one is defined from an existing index + requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(indexSettings); + assert IndexSettings.DEFAULT_PIPELINE.get(indexSettings).equals(IngestService.NOOP_PIPELINE_NAME) : + IndexSettings.DEFAULT_PIPELINE.get(indexSettings); + indexRequest.setPipeline(requiredPipeline); + requestCanOverridePipeline = false; + } else { + // find the default pipeline if one is defined from an existing index + String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexSettings); + indexRequest.setPipeline(defaultPipeline); + } + } else if (indexRequest.index() != null) { + // the index does not exist yet (and is valid request), so match index templates to look for a default pipeline + List templates = MetaDataIndexTemplateService.findTemplates(metaData, indexRequest.index()); + assert (templates != null); + // order of templates are highest order first, we have to iterate through them all though + String defaultPipeline = null; + for (IndexTemplateMetaData template : templates) { + final Settings settings = template.settings(); + if (requiredPipeline == null && IndexSettings.REQUIRED_PIPELINE.exists(settings)) { + requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(settings); + requestCanOverridePipeline = false; + // we can not 
break in case a lower-order template has a default pipeline that we need to reject + } else if (defaultPipeline == null && IndexSettings.DEFAULT_PIPELINE.exists(settings)) { + defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(settings); + // we can not break in case a lower-order template has a required pipeline that we need to reject + } + } + if (requiredPipeline != null && defaultPipeline != null) { + // we can not have picked up a required and a default pipeline from applying templates + final String message = String.format( + Locale.ROOT, + "required pipeline [%s] and default pipeline [%s] can not both be set", + requiredPipeline, + defaultPipeline); + throw new IllegalArgumentException(message); + } + final String pipeline; + if (requiredPipeline != null) { + pipeline = requiredPipeline; + } else { + pipeline = defaultPipeline != null ? defaultPipeline : IngestService.NOOP_PIPELINE_NAME; + } + indexRequest.setPipeline(pipeline); + } + + if (requestPipeline != null) { + if (requestCanOverridePipeline == false) { + final String message = String.format( + Locale.ROOT, + "request pipeline [%s] can not override required pipeline [%s]", + requestPipeline, + requiredPipeline); + throw new IllegalArgumentException(message); + } else { + indexRequest.setPipeline(requestPipeline); + } + } + + /* + * We have to track whether or not the pipeline for this request has already been resolved. It can happen that the + * pipeline for this request has already been derived yet we execute this loop again. That occurs if the bulk request + * has been forwarded by a non-ingest coordinating node to an ingest node. In this case, the coordinating node will have + * already resolved the pipeline for this request. It is important that we are able to distinguish this situation as we + * can not double-resolve the pipeline because we will not be able to distinguish the case of the pipeline having been + * set from a request pipeline parameter versus having been set by the resolution. We need to be able to distinguish + * these cases as we need to reject the request if the pipeline was set by a required pipeline and there is a request + * pipeline parameter too. 
+ */ + indexRequest.isPipelineResolved(true); + } + + // Return whether this index request has a pipeline + return IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false; + } + boolean needToCheck() { return autoCreateIndex.needToCheck(); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 162ef56553d..ee9c0635207 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver; @@ -27,11 +28,17 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -44,6 +51,8 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class TransportBulkActionTests extends ESTestCase { @@ -153,4 +162,152 @@ public class TransportBulkActionTests extends ESTestCase { UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1"); assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest)); } + + public void testResolveRequiredOrDefaultPipelineDefaultPipeline() { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // index name matches with IDM: + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + + // alias name matches with IDM: + indexRequest = new IndexRequest("alias"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + 
assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + + // index name matches with ITMD: + IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder("name1") + .patterns(Collections.singletonList("id*")) + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")); + metaData = MetaData.builder().put(templateBuilder).build(); + indexRequest = new IndexRequest("idx"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + } + + public void testResolveRequiredOrDefaultPipelineRequiredPipeline() { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // index name matches with IDM: + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + + // alias name matches with IDM: + indexRequest = new IndexRequest("alias"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + + // index name matches with ITMD: + IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder("name1") + .patterns(Collections.singletonList("id*")) + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")); + metaData = MetaData.builder().put(templateBuilder).build(); + indexRequest = new IndexRequest("idx"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + } + + public void testResolveRequiredOrDefaultAndRequiredPipeline() { + IndexTemplateMetaData.Builder builder1 = IndexTemplateMetaData.builder("name1") + .patterns(Collections.singletonList("i*")) + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")); + IndexTemplateMetaData.Builder builder2 = IndexTemplateMetaData.builder("name2") + .patterns(Collections.singletonList("id*")) + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")); + MetaData metaData = MetaData.builder().put(builder1).put(builder2).build(); + + IndexRequest indexRequest = new IndexRequest("idx"); + Exception e = expectThrows(IllegalArgumentException.class, + () -> TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData)); + assertThat(e.getMessage(), + equalTo("required pipeline [required-pipeline] and default pipeline [default-pipeline] can not both be set")); + } + + public void testResolveRequiredOrDefaultPipelineRequestPipeline() { + 
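// each block below exercises one combination of a request-level pipeline with the index's default or required pipeline settings +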
// no pipeline: + { + MetaData metaData = MetaData.builder().build(); + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(false)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo(IngestService.NOOP_PIPELINE_NAME)); + } + + // request pipeline: + { + MetaData metaData = MetaData.builder().build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("request-pipeline")); + } + + // request pipeline with default pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("request-pipeline")); + } + + // request pipeline with required pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + Exception e = expectThrows(IllegalArgumentException.class, + () -> TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData)); + assertThat(e.getMessage(), + equalTo("request pipeline [request-pipeline] can not override required pipeline [required-pipeline]")); + } + + // request pipeline set to required pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("required-pipeline").isPipelineResolved(true); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + } + } } From 23bceaadf87111047c2df592484cdcf8711a095c Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 25 Sep 2019 13:23:46 +0300 Subject: [PATCH 44/94] Handle RelayState in preparing a SAMLAuthN Request (#46534) (#47092) This change allows for the caller of the `saml/prepare` API to pass a `relay_state` parameter that will then be part of the redirect URL in the response as the `RelayState` query parameter. The SAML IdP is required to reflect back the value of that relay state when sending a SAML Response. 
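As a minimal sketch of how a caller might set this up (the realm name and
relay state value below are illustrative, not taken from this change):

    SamlPrepareAuthenticationRequest request = new SamlPrepareAuthenticationRequest();
    request.setRealmName("saml1");               // which SAML realm handles the request
    request.setRelayState("/docs/after-login");  // reflected back by the IdP
    // the redirect URL in the response then carries a RelayState query parameter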
The caller of the APIs can then, when receiving the SAML Response, read and
consume the value as it sees fit.
---
 build.gradle                                  |  4 ++--
 .../SamlPrepareAuthenticationRequest.java     | 19 +++++++++++++++++++
 ...nsportSamlPrepareAuthenticationAction.java |  6 +++---
 .../security/authc/saml/SamlRedirect.java     |  2 +-
 .../RestSamlPrepareAuthenticationAction.java  |  1 +
 ...SamlPrepareAuthenticationRequestTests.java |  5 ++++-
 6 files changed, 30 insertions(+), 7 deletions(-)

diff --git a/build.gradle b/build.gradle
index d2b31e0e662..f698fa052af 100644
--- a/build.gradle
+++ b/build.gradle
@@ -179,8 +179,8 @@ task verifyVersions {
  * after the backport of the backcompat code is complete.
  */

-boolean bwc_tests_enabled = true
-final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = false
+final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/46534" /* place a PR link here when committing bwc changes */
 if (bwc_tests_enabled == false) {
   if (bwc_tests_disabled_issue.isEmpty()) {
     throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java
index 21ec96ca9a2..a41ac0f3786 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.core.security.action.saml;

+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.common.Nullable;
@@ -24,10 +25,16 @@ public final class SamlPrepareAuthenticationRequest extends ActionRequest {
     @Nullable
     private String assertionConsumerServiceURL;

+    @Nullable
+    private String relayState;
+
     public SamlPrepareAuthenticationRequest(StreamInput in) throws IOException {
         super(in);
         realmName = in.readOptionalString();
         assertionConsumerServiceURL = in.readOptionalString();
+        if (in.getVersion().onOrAfter(Version.V_7_5_0)) {
+            relayState = in.readOptionalString();
+        }
     }

     public SamlPrepareAuthenticationRequest() {
@@ -54,11 +61,20 @@ public final class SamlPrepareAuthenticationRequest extends ActionRequest {
         this.assertionConsumerServiceURL = assertionConsumerServiceURL;
     }

+    public String getRelayState() {
+        return relayState;
+    }
+
+    public void setRelayState(String relayState) {
+        this.relayState = relayState;
+    }
+
     @Override
     public String toString() {
         return getClass().getSimpleName() + "{" +
             "realmName=" + realmName +
             ", assertionConsumerServiceURL=" + assertionConsumerServiceURL +
+            ", relayState=" + relayState +
             '}';
     }

@@ -67,5 +83,8 @@ public final class SamlPrepareAuthenticationRequest extends ActionRequest {
         super.writeTo(out);
         out.writeOptionalString(realmName);
         out.writeOptionalString(assertionConsumerServiceURL);
+        if (out.getVersion().onOrAfter(Version.V_7_5_0)) {
+            out.writeOptionalString(relayState);
+        }
     }
 }

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 5c8da382855..4b6cd89adea 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -49,14 +49,14 @@ public final class TransportSamlPrepareAuthenticationAction } else if (realms.size() > 1) { listener.onFailure(SamlUtils.samlException("Found multiple matching realms [{}] for [{}]", realms, request)); } else { - prepareAuthentication(realms.get(0), listener); + prepareAuthentication(realms.get(0), request.getRelayState(), listener); } } - private void prepareAuthentication(SamlRealm realm, ActionListener listener) { + private void prepareAuthentication(SamlRealm realm, String relayState, ActionListener listener) { final AuthnRequest authnRequest = realm.buildAuthenticationRequest(); try { - String redirectUrl = new SamlRedirect(authnRequest, realm.getSigningConfiguration()).getRedirectUrl(); + String redirectUrl = new SamlRedirect(authnRequest, realm.getSigningConfiguration()).getRedirectUrl(relayState); listener.onResponse(new SamlPrepareAuthenticationResponse( realm.name(), authnRequest.getID(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java index b728fb03bcd..1c46a89478f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java @@ -25,7 +25,7 @@ public class SamlRedirect { private final SAMLObject samlObject; private final String destination; private final String parameterName; - private final SigningConfiguration signing; + private final SigningConfiguration signing; public SamlRedirect(RequestAbstractType request, SigningConfiguration signing) { this.samlObject = request; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java index 84ff7ba1edf..b227f2c767f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java @@ -44,6 +44,7 @@ public class RestSamlPrepareAuthenticationAction extends SamlBaseRestHandler { static { PARSER.declareString(SamlPrepareAuthenticationRequest::setAssertionConsumerServiceURL, new ParseField("acs")); PARSER.declareString(SamlPrepareAuthenticationRequest::setRealmName, new ParseField("realm")); + PARSER.declareString(SamlPrepareAuthenticationRequest::setRelayState, new ParseField("relay_state")); } public RestSamlPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java index c6102151520..20138dc9a79 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java @@ -18,6 +18,7 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest(); req.setRealmName("saml1"); req.setAssertionConsumerServiceURL("https://sp.example.com/sso/saml2/post"); + req.setRelayState("the_relay_state"); serialiseAndValidate(req); } @@ -25,6 +26,7 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest(); req.setRealmName(null); req.setAssertionConsumerServiceURL(null); + req.setRelayState(null); serialiseAndValidate(req); } @@ -36,7 +38,8 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { assertThat(req2.getRealmName(), Matchers.equalTo(req1.getRealmName())); assertThat(req2.getAssertionConsumerServiceURL(), Matchers.equalTo(req1.getAssertionConsumerServiceURL())); + assertThat(req2.getRelayState(), Matchers.equalTo(req1.getRelayState())); assertThat(req2.getParentTask(), Matchers.equalTo(req1.getParentTask())); } -} \ No newline at end of file +} From f785c31531acd2661c4df2c47b60b3e7fa16bf4b Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 25 Sep 2019 13:52:05 +0300 Subject: [PATCH 45/94] File based role definition documentation additions (#46304) (#47085) This commit clarifies and points out that the Role management UI and the Role management API cannot be used to manage roles that are defined in roles.yml and that file based role management is intended to have a small administrative scope and not handle all possible RBAC use cases. --- .../docs/en/rest-api/security/create-roles.asciidoc | 7 +++---- .../docs/en/rest-api/security/delete-roles.asciidoc | 6 ++---- x-pack/docs/en/rest-api/security/get-roles.asciidoc | 5 +++-- .../en/security/authorization/managing-roles.asciidoc | 11 ++++++++++- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/x-pack/docs/en/rest-api/security/create-roles.asciidoc b/x-pack/docs/en/rest-api/security/create-roles.asciidoc index 7eda4c22b0d..19802234f32 100644 --- a/x-pack/docs/en/rest-api/security/create-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/create-roles.asciidoc @@ -24,10 +24,9 @@ privilege. [[security-api-put-role-desc]] ==== {api-description-title} -The role API is generally the preferred way to manage roles, rather than using -file-based role management. For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. - +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The create +or update roles API cannot update roles that are defined in roles files. 
[[security-api-put-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc index dec674b6577..ce5906ad8e3 100644 --- a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc @@ -22,10 +22,8 @@ Removes roles in the native realm. [[security-api-delete-role-desc]] ==== {api-description-title} -The Roles API is generally the preferred way to manage roles, rather than using -file-based role management. For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. - +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The delete roles API cannot remove roles that are defined in roles files. [[security-api-delete-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/rest-api/security/get-roles.asciidoc b/x-pack/docs/en/rest-api/security/get-roles.asciidoc index f014166362e..de7234697d3 100644 --- a/x-pack/docs/en/rest-api/security/get-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/get-roles.asciidoc @@ -23,8 +23,9 @@ privilege. [[security-api-get-role-desc]] ==== {api-description-title} -For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The get roles +API cannot retrieve roles that are defined in roles files. [[security-api-get-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index ee984296f08..eab8e7f573b 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -214,7 +214,16 @@ _Role Management APIs_, the role found in the file will be used. While the _Role Management APIs_ is the preferred mechanism to define roles, using the `roles.yml` file becomes useful if you want to define fixed roles that no one (beside an administrator having physical access to the {es} nodes) -would be able to change. +would be able to change. Please note however, that the `roles.yml` file is provided as a +minimal administrative function and is not intended to cover and be used +to define roles for all use cases. + +[IMPORTANT] +============================== +You cannot view, edit, or remove any roles that are defined in `roles.yml` by +using the <> or the +<>. 
+============================== [IMPORTANT] ============================== From 7a5b5af1719fbb10114bc08c90e87c870608db8f Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 11:50:36 +0200 Subject: [PATCH 46/94] Mute MlJobIT.testDeleteJobAsync Relates #45652 --- .../java/org/elasticsearch/xpack/ml/integration/MlJobIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 114437a01c8..97a901d7de9 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -464,6 +464,7 @@ public class MlJobIT extends ESRestTestCase { assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(404)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/45652") public void testDeleteJobAsync() throws Exception { String jobId = "delete-job-async-job"; String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; From eb86d71edd8dd4839c78b0fa3c2a32fd02c2ba8d Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 12:51:24 +0200 Subject: [PATCH 47/94] Mute MlJobIT.testDeleteJob Relates #45652 --- .../java/org/elasticsearch/xpack/ml/integration/MlJobIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 97a901d7de9..1c7a367239e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -394,6 +394,7 @@ public class MlJobIT extends ESRestTestCase { "avoid the clash by assigning a dedicated results index")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/45652") public void testDeleteJob() throws Exception { String jobId = "delete-job-job"; String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; From ac920e8e645cdcc4320f79d7c11614e4da8ffe62 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 25 Sep 2019 12:31:36 +0100 Subject: [PATCH 48/94] Assert no exceptions during state application (#47090) Today we log and swallow exceptions during cluster state application, but such an exception should not occur. This commit adds assertions of this fact, and updates the Javadocs to explain it. 
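The contract, in sketch form (refreshLocalCaches and logger here are
hypothetical stand-ins, not code from this change): an applier should contain
its own failures rather than propagate them.

    ClusterStateApplier applier = event -> {
        try {
            refreshLocalCaches(event.state()); // hypothetical consumer of the committed state
        } catch (Exception e) {
            // the state is already committed; throwing here would stop the
            // remaining appliers, so log and move on instead
            logger.warn("failed to refresh local caches", e);
        }
    };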
Relates #47038 --- .../cluster/ClusterStateApplier.java | 6 +++++- .../cluster/service/ClusterApplierService.java | 9 ++++++++- .../common/settings/AbstractScopedSettings.java | 1 - .../cluster/IndicesClusterStateService.java | 15 ++++++++++++--- .../cluster/coordination/CoordinatorTests.java | 2 ++ .../service/ClusterApplierServiceTests.java | 12 ++++++++++++ .../coordination/AbstractCoordinatorTestCase.java | 14 ++++++++++++++ 7 files changed, 53 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java index c339a8ed97e..ad983f43bf2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java @@ -28,7 +28,11 @@ import org.elasticsearch.cluster.service.ClusterService; public interface ClusterStateApplier { /** - * Called when a new cluster state ({@link ClusterChangedEvent#state()} needs to be applied + * Called when a new cluster state ({@link ClusterChangedEvent#state()} needs to be applied. The cluster state to be applied is already + * committed when this method is called, so an applier must therefore be prepared to deal with any state it receives without throwing + * an exception. Throwing an exception from an applier is very bad because it will stop the application of this state before it has + * reached all the other appliers, and will likely result in another attempt to apply the same (or very similar) cluster state which + * might continue until this node is removed from the cluster. */ void applyClusterState(ClusterChangedEvent event); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index eb41e710cca..f5bbe2d420b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -390,7 +390,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements return true; } - protected void runTask(UpdateTask task) { + private void runTask(UpdateTask task) { if (!lifecycle.started()) { logger.debug("processing [{}]: ignoring, cluster applier service not started", task.source); return; @@ -447,6 +447,9 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]", executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source), e); } + // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we + // continue we will retry with the same cluster state but that might not help. 
+ assert applicationMayFail(); task.listener.onFailure(task.source, e); } } @@ -667,4 +670,8 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements return threadPool.relativeTimeInMillis(); } + // overridden by tests that need to check behaviour in the event of an application failure + protected boolean applicationMayFail() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 9d3e278e889..9db52b9eb93 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -193,7 +193,6 @@ public abstract class AbstractScopedSettings { } catch (Exception ex) { logger.warn("failed to apply settings", ex); throw ex; - } finally { } return lastSettingsApplied = newSettings; } diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 11b7f03da10..21cf49a949d 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -532,8 +532,17 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple final IndexMetaData newIndexMetaData = state.metaData().index(index); assert newIndexMetaData != null : "index " + index + " should have been removed by deleteIndices"; if (ClusterChangedEvent.indexMetaDataChanged(currentIndexMetaData, newIndexMetaData)) { - indexService.updateMetaData(currentIndexMetaData, newIndexMetaData); + String reason = null; try { + reason = "metadata update failed"; + try { + indexService.updateMetaData(currentIndexMetaData, newIndexMetaData); + } catch (Exception e) { + assert false : e; + throw e; + } + + reason = "mapping update failed"; if (indexService.updateMapping(currentIndexMetaData, newIndexMetaData) && sendRefreshMapping) { nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(), new NodeMappingRefreshAction.NodeMappingRefreshRequest(newIndexMetaData.getIndex().getName(), @@ -541,14 +550,14 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple ); } } catch (Exception e) { - indicesService.removeIndex(indexService.index(), FAILURE, "removing index (mapping update failed)"); + indicesService.removeIndex(indexService.index(), FAILURE, "removing index (" + reason + ")"); // fail shards that would be created or updated by createOrUpdateShards RoutingNode localRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); if (localRoutingNode != null) { for (final ShardRouting shardRouting : localRoutingNode) { if (shardRouting.index().equals(index) && failedShardsCache.containsKey(shardRouting.shardId()) == false) { - sendFailShard(shardRouting, "failed to update mapping for index", e, state); + sendFailShard(shardRouting, "failed to update index (" + reason + ")", e, state); } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index f968f6f4742..1735c8ba033 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -586,6 +586,7 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase { final ClusterNode follower0 = cluster.getAnyNodeExcept(leader); final ClusterNode follower1 = cluster.getAnyNodeExcept(leader, follower0); + follower0.allowClusterStateApplicationFailure(); follower0.setClusterStateApplyResponse(ClusterStateApplyResponse.FAIL); AckCollector ackCollector = leader.submitValue(randomLong()); cluster.stabilise(DEFAULT_CLUSTER_STATE_UPDATE_DELAY); @@ -605,6 +606,7 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase { final ClusterNode follower1 = cluster.getAnyNodeExcept(leader, follower0); final long startingTerm = leader.coordinator.getCurrentTerm(); + leader.allowClusterStateApplicationFailure(); leader.setClusterStateApplyResponse(ClusterStateApplyResponse.FAIL); AckCollector ackCollector = leader.submitValue(randomLong()); cluster.runFor(DEFAULT_CLUSTER_STATE_UPDATE_DELAY, "committing value"); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java index 4da5de7941e..a851cb7069e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java @@ -358,6 +358,7 @@ public class ClusterApplierServiceTests extends ESTestCase { clusterApplierService.addStateApplier(event -> { throw new RuntimeException("dummy exception"); }); + clusterApplierService.allowClusterStateApplicationFailure(); CountDownLatch latch = new CountDownLatch(1); clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()).build(), @@ -386,6 +387,7 @@ public class ClusterApplierServiceTests extends ESTestCase { AtomicReference error = new AtomicReference<>(); clusterApplierService.clusterSettings.addSettingsUpdateConsumer(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, v -> {}); + clusterApplierService.allowClusterStateApplicationFailure(); CountDownLatch latch = new CountDownLatch(1); clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()) @@ -496,6 +498,7 @@ public class ClusterApplierServiceTests extends ESTestCase { final ClusterSettings clusterSettings; volatile Long currentTimeOverride = null; + boolean applicationMayFail; TimedClusterApplierService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { super("test_node", settings, clusterSettings, threadPool); @@ -509,6 +512,15 @@ public class ClusterApplierServiceTests extends ESTestCase { } return super.currentTimeInMillis(); } + + @Override + protected boolean applicationMayFail() { + return this.applicationMayFail; + } + + void allowClusterStateApplicationFailure() { + this.applicationMayFail = true; + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 102de69cc43..f660b22428a 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -1172,6 +1172,10 @@ public class AbstractCoordinatorTestCase extends ESTestCase { 
private boolean isNotUsefullyBootstrapped() { return getLocalNode().isMasterNode() == false || coordinator.isInitialConfigurationSet() == false; } + + void allowClusterStateApplicationFailure() { + clusterApplierService.allowClusterStateApplicationFailure(); + } } private List provideSeedHosts(SeedHostsProvider.HostsResolver ignored) { @@ -1282,6 +1286,7 @@ public class AbstractCoordinatorTestCase extends ESTestCase { private final String nodeName; private final DeterministicTaskQueue deterministicTaskQueue; ClusterStateApplyResponse clusterStateApplyResponse = ClusterStateApplyResponse.SUCCEED; + private boolean applicationMayFail; DisruptableClusterApplierService(String nodeName, Settings settings, ClusterSettings clusterSettings, DeterministicTaskQueue deterministicTaskQueue, Function runnableWrapper) { @@ -1326,6 +1331,15 @@ public class AbstractCoordinatorTestCase extends ESTestCase { protected void connectToNodesAndWait(ClusterState newClusterState) { // don't do anything, and don't block } + + @Override + protected boolean applicationMayFail() { + return this.applicationMayFail; + } + + void allowClusterStateApplicationFailure() { + this.applicationMayFail = true; + } } protected DiscoveryNode createDiscoveryNode(int nodeIndex, boolean masterEligible) { From a4cecc54aba6c7c2c4d99cefea6810cb51b7a4c7 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 14:01:09 +0200 Subject: [PATCH 49/94] Mute monitoring/bulk/20_privileges Relates #30101 --- .../rest-api-spec/test/monitoring/bulk/20_privileges.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml index 437ce21d0c8..c9e69fc0c76 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml @@ -77,6 +77,8 @@ teardown: "Monitoring Bulk API": - skip: features: catch_unauthorized + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/30101" - do: headers: From 81cbd3fba4849c108c129134bc6c4243deab9540 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 14:01:54 +0200 Subject: [PATCH 50/94] Mute ClusterShardLimitIT.testIndexCreationOverLimitFromTemplate Relates #47107 --- .../org/elasticsearch/cluster/shards/ClusterShardLimitIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java index 189ecc05aba..5e9320deafa 100644 --- a/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java @@ -102,6 +102,7 @@ public class ClusterShardLimitIT extends ESIntegTestCase { assertFalse(clusterState.getMetaData().hasIndex("should-fail")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47107") public void testIndexCreationOverLimitFromTemplate() { int dataNodes = client().admin().cluster().prepareState().get().getState().getNodes().getDataNodes().size(); From 05fb7be5716949b672db2a5307d5f7b0fbf1d0cf Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 25 Sep 2019 08:16:24 -0400 Subject: [PATCH 51/94] [7.x] [ML][Inference] Feature pre-processing objects and functions (#46777) (#47040) * [ML][Inference] 
Feature pre-processing objects and functions (#46777)

To support inference on pre-trained machine learning models, some basic
feature encoding will be necessary. I am using a named object serialization
approach so new encodings/pre-processing steps could be added in the future.

This PR lays down the groundwork for 3 basic encodings:
* One hot
* Target Mean
* Frequency

More feature encodings or pre-processing steps could be added in the future:
* Handling missing columns
* Standardization
* Label encoding
* etc.

* fixing compilation for namedxcontent tests
---
 .../MlInferenceNamedXContentProvider.java     |  48 +++++
 .../preprocessing/FrequencyEncoding.java      | 161 +++++++++++++++
 .../preprocessing/OneHotEncoding.java         | 138 +++++++++++++
 .../inference/preprocessing/PreProcessor.java |  33 ++++
 .../preprocessing/TargetMeanEncoding.java     | 183 ++++++++++++++++++
 ...icsearch.plugins.spi.NamedXContentProvider |   1 +
 .../client/RestHighLevelClientTests.java      |  10 +-
 .../preprocessing/FrequencyEncodingTests.java |  60 ++++++
 .../preprocessing/OneHotEncodingTests.java    |  61 ++++++
 .../TargetMeanEncodingTests.java              |  64 ++++++
 .../xpack/core/XPackClientPlugin.java         |   8 +
 .../MlInferenceNamedXContentProvider.java     |  59 ++++++
 .../preprocessing/FrequencyEncoding.java      | 146 ++++++++++++++
 .../LenientlyParsedPreProcessor.java          |  12 ++
 .../preprocessing/OneHotEncoding.java         | 130 +++++++++++++
 .../inference/preprocessing/PreProcessor.java |  26 +++
 .../StrictlyParsedPreProcessor.java           |  12 ++
 .../preprocessing/TargetMeanEncoding.java     | 161 +++++++++++++++
 .../core/ml/utils/NamedXContentObject.java    |  21 ++
 .../inference/NamedXContentObjectsTests.java  | 171 ++++++++++++++++
 .../preprocessing/FrequencyEncodingTests.java |  68 +++++++
 .../preprocessing/OneHotEncodingTests.java    |  70 +++++++
 .../preprocessing/PreProcessingTests.java     |  67 +++++++
 .../TargetMeanEncodingTests.java              |  71 +++++++
 .../xpack/ml/MachineLearning.java             |   3 +
 25 files changed, 1782 insertions(+), 2 deletions(-)
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java
 create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java
 create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java
 create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java
 create mode 100644
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/StrictlyParsedPreProcessor.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java new file mode 100644 index 00000000000..867c598da90 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference; + +import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.plugins.spi.NamedXContentProvider; + +import java.util.ArrayList; +import java.util.List; + +public class MlInferenceNamedXContentProvider implements NamedXContentProvider { + + @Override + public List getNamedXContentParsers() { + List namedXContent = new ArrayList<>(); + + // PreProcessing + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME), + OneHotEncoding::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME), + TargetMeanEncoding::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), + FrequencyEncoding::fromXContent)); + return namedXContent; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java new file mode 100644 index 00000000000..fd0810d613e --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for frequency encoding a set of categorical values for a given field. 
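+ *
+ * For example (values here are purely illustrative), a frequency map of
+ * {"a": 0.75, "b": 0.25} over field "cat" would emit the observed frequency
+ * of the field's value as the numeric feature named by feature_name.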
+ */ +public class FrequencyEncoding implements PreProcessor { + + public static final String NAME = "frequency_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map)a[2])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + FREQUENCY_MAP); + } + + public static FrequencyEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map frequencyMap; + + public FrequencyEncoding(String field, String featureName, Map frequencyMap) { + this.field = Objects.requireNonNull(field); + this.featureName = Objects.requireNonNull(featureName); + this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap)); + } + + /** + * @return Field name on which to frequency encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: frequency for the frequency encoding + */ + public Map getFrequencyMap() { + return frequencyMap; + } + + /** + * @return The encoded feature name + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FrequencyEncoding that = (FrequencyEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(frequencyMap, that.frequencyMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, frequencyMap); + } + + public Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private String featureName; + private Map frequencyMap = new HashMap<>(); + + public Builder(String field) { + this.field = field; + } + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public Builder setFrequencyMap(Map frequencyMap) { + this.frequencyMap = new HashMap<>(frequencyMap); + return this; + } + + public Builder addFrequency(String valueName, double frequency) { + this.frequencyMap.put(valueName, frequency); + return this; + } + + public FrequencyEncoding build() { + return new FrequencyEncoding(field, featureName, frequencyMap); + } + } + +} diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java new file mode 100644 index 00000000000..812cd723f99 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * PreProcessor for one hot encoding a set of categorical values for a given field. 
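+ *
+ * For example (illustrative values), a hot map of {"red": "color_red"} over
+ * field "color" would add a feature named "color_red" indicating whether the
+ * document's value for "color" is "red".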
+ */ +public class OneHotEncoding implements PreProcessor { + + public static final String NAME = "one_hot_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField HOT_MAP = new ParseField("hot_map"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new OneHotEncoding((String)a[0], (Map)a[1])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); + } + + public static OneHotEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final Map hotMap; + + public OneHotEncoding(String field, Map hotMap) { + this.field = Objects.requireNonNull(field); + this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap)); + } + + /** + * @return Field name on which to one hot encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: ColumnName for the one hot encoding + */ + public Map getHotMap() { + return hotMap; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(HOT_MAP.getPreferredName(), hotMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OneHotEncoding that = (OneHotEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(hotMap, that.hotMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, hotMap); + } + + public Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private Map hotMap = new HashMap<>(); + + public Builder(String field) { + this.field = field; + } + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setHotMap(Map hotMap) { + this.hotMap = new HashMap<>(hotMap); + return this; + } + + public Builder addOneHot(String valueName, String oneHotFeatureName) { + this.hotMap.put(valueName, oneHotFeatureName); + return this; + } + + public OneHotEncoding build() { + return new OneHotEncoding(field, hotMap); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java new file mode 100644 index 00000000000..ea814a8a0d6 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.ToXContentObject; + + +/** + * Describes a pre-processor for a defined machine learning model + */ +public interface PreProcessor extends ToXContentObject { + + /** + * @return The name of the pre-processor + */ + String getName(); +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java new file mode 100644 index 00000000000..bb29924b98e --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java @@ -0,0 +1,183 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for target mean encoding a set of categorical values for a given field. 
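+ *
+ * For example (illustrative values), target means of {"a": 0.8, "b": 0.2}
+ * with a default value of 0.5 would encode "a" as 0.8 and any previously
+ * unobserved value as 0.5.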
+ */ +public class TargetMeanEncoding implements PreProcessor { + + public static final String NAME = "target_mean_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField TARGET_MEANS = new ParseField("target_means"); + public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map)a[2], (Double)a[3])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + TARGET_MEANS); + PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); + } + + public static TargetMeanEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map meanMap; + private final double defaultValue; + + public TargetMeanEncoding(String field, String featureName, Map meanMap, Double defaultValue) { + this.field = Objects.requireNonNull(field); + this.featureName = Objects.requireNonNull(featureName); + this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap)); + this.defaultValue = Objects.requireNonNull(defaultValue); + } + + /** + * @return Field name on which to target mean encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: targetMean for the target mean encoding + */ + public Map getMeanMap() { + return meanMap; + } + + /** + * @return The default value to set when a previously unobserved value is seen + */ + public double getDefaultValue() { + return defaultValue; + } + + /** + * @return The feature name for the encoded value + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(TARGET_MEANS.getPreferredName(), meanMap); + builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TargetMeanEncoding that = (TargetMeanEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(meanMap, that.meanMap) + && Objects.equals(defaultValue, that.defaultValue); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, meanMap, defaultValue); + } + + public Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private String featureName; + private Map meanMap = new HashMap<>(); + private double defaultValue; + + public Builder(String field) { + this.field = field; + } + + public String getField() { + return field; + } + + public Builder setField(String 
field) { + this.field = field; + return this; + } + + public Builder setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public Builder setMeanMap(Map meanMap) { + this.meanMap = meanMap; + return this; + } + + public Builder addMeanMapEntry(String valueName, double meanEncoding) { + this.meanMap.put(valueName, meanEncoding); + return this; + } + + public Builder setDefaultValue(double defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public TargetMeanEncoding build() { + return new TargetMeanEncoding(field, featureName, meanMap, defaultValue); + } + } +} diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider index dfa56956edb..c3facfa93ff 100644 --- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider @@ -1,4 +1,5 @@ org.elasticsearch.client.indexlifecycle.IndexLifecycleNamedXContentProvider org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider +org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider org.elasticsearch.client.transform.TransformNamedXContentProvider diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index c0939ef586e..15929bbaf21 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -65,6 +65,9 @@ import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.Binar import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric; +import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; import org.elasticsearch.client.transform.transforms.SyncConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; import org.elasticsearch.common.CheckedFunction; @@ -95,6 +98,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; + import org.hamcrest.Matchers; import org.junit.Before; @@ -676,7 +680,7 @@ public class RestHighLevelClientTests extends ESTestCase { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(37, namedXContents.size()); + assertEquals(40, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -686,7 +690,7 @@ public class 
RestHighLevelClientTests extends ESTestCase { categories.put(namedXContent.categoryClass, counter + 1); } } - assertEquals("Had: " + categories, 9, categories.size()); + assertEquals("Had: " + categories, 10, categories.size()); assertEquals(Integer.valueOf(3), categories.get(Aggregation.class)); assertTrue(names.contains(ChildrenAggregationBuilder.NAME)); assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME)); @@ -733,6 +737,8 @@ public class RestHighLevelClientTests extends ESTestCase { ConfusionMatrixMetric.NAME, MeanSquaredErrorMetric.NAME, RSquaredMetric.NAME)); + assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class)); + assertThat(names, hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME)); } public void testApiNamingConventions() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java new file mode 100644 index 00000000000..8e9aa30930a --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class FrequencyEncodingTests extends AbstractXContentTestCase { + + @Override + protected FrequencyEncoding doParseInstance(XContentParser parser) throws IOException { + return FrequencyEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected FrequencyEncoding createTestInstance() { + return createRandom(); + } + + public static FrequencyEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new FrequencyEncoding(randomAlphaOfLength(10), randomAlphaOfLength(10), valueMap); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java new file mode 100644 index 00000000000..d8cd0d1f87a --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class OneHotEncodingTests extends AbstractXContentTestCase { + + @Override + protected OneHotEncoding doParseInstance(XContentParser parser) throws IOException { + return OneHotEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected OneHotEncoding createTestInstance() { + return createRandom(); + } + + public static OneHotEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + return new OneHotEncoding(randomAlphaOfLength(10), valueMap); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java new file mode 100644 index 00000000000..8e751f752f8 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class TargetMeanEncodingTests extends AbstractXContentTestCase { + + @Override + protected TargetMeanEncoding doParseInstance(XContentParser parser) throws IOException { + return TargetMeanEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected TargetMeanEncoding createTestInstance() { + return createRandom(); + } + + public static TargetMeanEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new TargetMeanEncoding(randomAlphaOfLength(10), + randomAlphaOfLength(10), + valueMap, + randomDoubleBetween(0.0, 1.0, false)); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 9ba8ea02306..5570e82f10c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -145,6 +145,10 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.P import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.Recall; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.ScoreByThresholdResult; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.SoftClassificationMetric; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; @@ -472,6 +476,10 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ScoreByThresholdResult.NAME, ScoreByThresholdResult::new), new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ConfusionMatrix.NAME.getPreferredName(), ConfusionMatrix.Result::new), + // ML - Inference + new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), FrequencyEncoding::new), + new NamedWriteableRegistry.Entry(PreProcessor.class, OneHotEncoding.NAME.getPreferredName(), OneHotEncoding::new), + new NamedWriteableRegistry.Entry(PreProcessor.class, TargetMeanEncoding.NAME.getPreferredName(), TargetMeanEncoding::new), // monitoring new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MONITORING, MonitoringFeatureSetUsage::new), diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java new file mode 100644 index 00000000000..d7da457b64c --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.plugins.spi.NamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; + +import java.util.ArrayList; +import java.util.List; + +public class MlInferenceNamedXContentProvider implements NamedXContentProvider { + + @Override + public List getNamedXContentParsers() { + List namedXContent = new ArrayList<>(); + + // PreProcessing Lenient + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, OneHotEncoding.NAME, + OneHotEncoding::fromXContentLenient)); + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, TargetMeanEncoding.NAME, + TargetMeanEncoding::fromXContentLenient)); + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, FrequencyEncoding.NAME, + FrequencyEncoding::fromXContentLenient)); + + // PreProcessing Strict + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, OneHotEncoding.NAME, + OneHotEncoding::fromXContentStrict)); + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, TargetMeanEncoding.NAME, + TargetMeanEncoding::fromXContentStrict)); + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, FrequencyEncoding.NAME, + FrequencyEncoding::fromXContentStrict)); + + return namedXContent; + } + + public List getNamedWriteables() { + List namedWriteables = new ArrayList<>(); + + // PreProcessing + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, OneHotEncoding.NAME.getPreferredName(), + OneHotEncoding::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, TargetMeanEncoding.NAME.getPreferredName(), + TargetMeanEncoding::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), + FrequencyEncoding::new)); + + return namedWriteables; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java new file mode 100644 index 00000000000..351c0f05960 --- /dev/null 
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for frequency encoding a set of categorical values for a given field. + */ +public class FrequencyEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor { + + public static final ParseField NAME = new ParseField("frequency_encoding"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); + + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + NAME.getPreferredName(), + lenient, + a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map)a[2])); + parser.declareString(ConstructingObjectParser.constructorArg(), FIELD); + parser.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + parser.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + FREQUENCY_MAP); + return parser; + } + + public static FrequencyEncoding fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null); + } + + public static FrequencyEncoding fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map frequencyMap; + + public FrequencyEncoding(String field, String featureName, Map frequencyMap) { + this.field = ExceptionsHelper.requireNonNull(field, FIELD); + this.featureName = ExceptionsHelper.requireNonNull(featureName, FEATURE_NAME); + this.frequencyMap = Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(frequencyMap, FREQUENCY_MAP)); + } + + public FrequencyEncoding(StreamInput in) throws IOException { + this.field = in.readString(); + this.featureName = in.readString(); + this.frequencyMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readDouble)); + } + + /** + * @return Field name on which to frequency encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: frequency for the frequency encoding + */ + public Map getFrequencyMap() { + return frequencyMap; + } + + /** + * @return The encoded feature name + */ + public 
String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public void process(Map fields) { + String value = (String)fields.get(field); + if (value == null) { + return; + } + fields.put(featureName, frequencyMap.getOrDefault(value, 0.0)); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(featureName); + out.writeMap(frequencyMap, StreamOutput::writeString, StreamOutput::writeDouble); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FrequencyEncoding that = (FrequencyEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(frequencyMap, that.frequencyMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, frequencyMap); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java new file mode 100644 index 00000000000..e4e8957420d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +/** + * To be used in conjunction with a lenient parser. + */ +public interface LenientlyParsedPreProcessor extends PreProcessor { +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java new file mode 100644 index 00000000000..106cb1e26c1 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +/** + * PreProcessor for one hot encoding a set of categorical values for a given field. + */ +public class OneHotEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor { + + public static final ParseField NAME = new ParseField("one_hot_encoding"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField HOT_MAP = new ParseField("hot_map"); + + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + NAME.getPreferredName(), + lenient, + a -> new OneHotEncoding((String)a[0], (Map)a[1])); + parser.declareString(ConstructingObjectParser.constructorArg(), FIELD); + parser.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); + return parser; + } + + public static OneHotEncoding fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null); + } + + public static OneHotEncoding fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null); + } + + private final String field; + private final Map hotMap; + + public OneHotEncoding(String field, Map hotMap) { + this.field = ExceptionsHelper.requireNonNull(field, FIELD); + this.hotMap = Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(hotMap, HOT_MAP)); + } + + public OneHotEncoding(StreamInput in) throws IOException { + this.field = in.readString(); + this.hotMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); + } + + /** + * @return Field name on which to one hot encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: ColumnName for the one hot encoding + */ + public Map getHotMap() { + return hotMap; + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public void process(Map fields) { + String value = (String)fields.get(field); + if (value == null) { + return; + } + hotMap.forEach((val, col) -> { + int encoding = value.equals(val) ? 
1 : 0; + fields.put(col, encoding); + }); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeMap(hotMap, StreamOutput::writeString, StreamOutput::writeString); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(HOT_MAP.getPreferredName(), hotMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OneHotEncoding that = (OneHotEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(hotMap, that.hotMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, hotMap); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java new file mode 100644 index 00000000000..79e1ce16ad8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; + +import java.util.Map; + +/** + * Describes a pre-processor for a defined machine learning model + * This processor should take a set of fields and return the modified set of fields. + */ +public interface PreProcessor extends NamedXContentObject, NamedWriteable { + + /** + * Process the given fields and their values and return the modified map. + * + * NOTE: The passed map object is mutated directly + * @param fields The fields and their values to process + */ + void process(Map fields); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/StrictlyParsedPreProcessor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/StrictlyParsedPreProcessor.java new file mode 100644 index 00000000000..925a9b86b1d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/StrictlyParsedPreProcessor.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +/** + * To be used in conjunction with a strict parser. 
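+ * A strict parser fails the whole parse when it meets an unknown field, which
+ * is the behaviour wanted for user-supplied configuration, whereas the lenient
+ * variant ignores unknown fields, e.g. when re-reading configuration that was
+ * persisted by a later version.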
+ */ +public interface StrictlyParsedPreProcessor extends PreProcessor { +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java new file mode 100644 index 00000000000..ebce49db957 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for target mean encoding a set of categorical values for a given field. + */ +public class TargetMeanEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor { + + public static final ParseField NAME = new ParseField("target_mean_encoding"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField TARGET_MEANS = new ParseField("target_means"); + public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); + + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + NAME.getPreferredName(), + lenient, + a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map)a[2], (Double)a[3])); + parser.declareString(ConstructingObjectParser.constructorArg(), FIELD); + parser.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + parser.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + TARGET_MEANS); + parser.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); + return parser; + } + + public static TargetMeanEncoding fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null); + } + + public static TargetMeanEncoding fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map meanMap; + private final double defaultValue; + + public TargetMeanEncoding(String field, String featureName, Map meanMap, Double defaultValue) { + this.field = ExceptionsHelper.requireNonNull(field, FIELD); + this.featureName = ExceptionsHelper.requireNonNull(featureName, FEATURE_NAME); + this.meanMap = 
Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(meanMap, TARGET_MEANS)); + this.defaultValue = ExceptionsHelper.requireNonNull(defaultValue, DEFAULT_VALUE); + } + + public TargetMeanEncoding(StreamInput in) throws IOException { + this.field = in.readString(); + this.featureName = in.readString(); + this.meanMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readDouble)); + this.defaultValue = in.readDouble(); + } + + /** + * @return Field name on which to target mean encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: targetMean for the target mean encoding + */ + public Map getMeanMap() { + return meanMap; + } + + /** + * @return The default value to set when a previously unobserved value is seen + */ + public Double getDefaultValue() { + return defaultValue; + } + + /** + * @return The feature name for the encoded value + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public void process(Map fields) { + String value = (String)fields.get(field); + if (value == null) { + return; + } + fields.put(featureName, meanMap.getOrDefault(value, defaultValue)); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(featureName); + out.writeMap(meanMap, StreamOutput::writeString, StreamOutput::writeDouble); + out.writeDouble(defaultValue); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(TARGET_MEANS.getPreferredName(), meanMap); + builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TargetMeanEncoding that = (TargetMeanEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(meanMap, that.meanMap) + && Objects.equals(defaultValue, that.defaultValue); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, meanMap, defaultValue); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java new file mode 100644 index 00000000000..6f8f38787c6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.utils; + +import org.elasticsearch.common.xcontent.ToXContentObject; + +/** + * Simple interface for XContent Objects that are named. + * + * This affords more general handling when serializing and de-serializing this type of XContent when it is used in a NamedObjects + * parser. 
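+ *
+ * For example, a pre-processor is written as a single-entry object keyed by
+ * the value of getName(), such as {"one_hot_encoding": {...}}, so the matching
+ * parser can be looked up by that name when the object is read back.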
+ */ +public interface NamedXContentObject extends ToXContentObject { + /** + * @return The name of the XContentObject that is to be serialized + */ + String getName(); +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java new file mode 100644 index 00000000000..bd1740edf07 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncodingTests; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncodingTests; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncodingTests; +import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.function.Predicate; + +//TODO these tests are temporary until the named objects are actually used by an encompassing class (i.e. ModelInferer) +public class NamedXContentObjectsTests extends AbstractXContentTestCase { + + static class NamedObjectContainer implements ToXContentObject { + + static ParseField PRE_PROCESSORS = new ParseField("pre_processors"); + + static final ObjectParser STRICT_PARSER = createParser(false); + static final ObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( + "named_xcontent_object_container_test", + lenient, + NamedObjectContainer::new); + parser.declareNamedObjects(NamedObjectContainer::setPreProcessors, + (p, c, n) -> + lenient ? 
p.namedObject(LenientlyParsedPreProcessor.class, n, null) : + p.namedObject(StrictlyParsedPreProcessor.class, n, null), + (noc) -> noc.setUseExplicitPreprocessorOrder(true), PRE_PROCESSORS); + return parser; + } + + private boolean useExplicitPreprocessorOrder = false; + private List preProcessors; + + void setPreProcessors(List preProcessors) { + this.preProcessors = preProcessors; + } + + void setUseExplicitPreprocessorOrder(boolean value) { + this.useExplicitPreprocessorOrder = value; + } + + static NamedObjectContainer fromXContent(XContentParser parser, boolean lenient) { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + writeNamedObjects(builder, params, useExplicitPreprocessorOrder, PRE_PROCESSORS.getPreferredName(), preProcessors); + builder.endObject(); + return builder; + } + + XContentBuilder writeNamedObjects(XContentBuilder builder, + Params params, + boolean useExplicitOrder, + String namedObjectsName, + List namedObjects) throws IOException { + if (useExplicitOrder) { + builder.startArray(namedObjectsName); + } else { + builder.startObject(namedObjectsName); + } + for (NamedXContentObject object : namedObjects) { + if (useExplicitOrder) { + builder.startObject(); + } + builder.field(object.getName(), object, params); + if (useExplicitOrder) { + builder.endObject(); + } + } + if (useExplicitOrder) { + builder.endArray(); + } else { + builder.endObject(); + } + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NamedObjectContainer that = (NamedObjectContainer) o; + return Objects.equals(preProcessors, that.preProcessors); + } + + @Override + public int hashCode() { + return Objects.hash(preProcessors); + } + } + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + public NamedObjectContainer createTestInstance() { + int max = randomIntBetween(1, 10); + List preProcessors = new ArrayList<>(max); + for (int i = 0; i < max; i++) { + preProcessors.add(randomFrom(FrequencyEncodingTests.createRandom(), + OneHotEncodingTests.createRandom(), + TargetMeanEncodingTests.createRandom())); + } + NamedObjectContainer container = new NamedObjectContainer(); + container.setPreProcessors(preProcessors); + container.setUseExplicitPreprocessorOrder(true); + return container; + } + + @Override + protected NamedObjectContainer doParseInstance(XContentParser parser) throws IOException { + return NamedObjectContainer.fromXContent(parser, lenient); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We only want to add random fields to the root, or the root of the named objects + return field -> + (field.endsWith("frequency_encoding") || + field.endsWith("one_hot_encoding") || + field.endsWith("target_mean_encoding") || + field.isEmpty()) == false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List namedXContent = new ArrayList<>(); + namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents()); + return new NamedXContentRegistry(namedXContent); + } +} + diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java new file mode 100644 index 00000000000..72047178e9f --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +public class FrequencyEncodingTests extends PreProcessingTests { + + @Override + protected FrequencyEncoding doParseInstance(XContentParser parser) throws IOException { + return lenient ? FrequencyEncoding.fromXContentLenient(parser) : FrequencyEncoding.fromXContentStrict(parser); + } + + @Override + protected FrequencyEncoding createTestInstance() { + return createRandom(); + } + + public static FrequencyEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new FrequencyEncoding(randomAlphaOfLength(10), randomAlphaOfLength(10), valueMap); + } + + @Override + protected Writeable.Reader instanceReader() { + return FrequencyEncoding::new; + } + + public void testProcessWithFieldPresent() { + String field = "categorical"; + List values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote"); + Map valueMap = values.stream().collect(Collectors.toMap(Function.identity(), + v -> randomDoubleBetween(0.0, 1.0, false))); + String encodedFeatureName = "encoded"; + FrequencyEncoding encoding = new FrequencyEncoding(field, encodedFeatureName, valueMap); + String fieldValue = randomFrom(values); + Map> matchers = Collections.singletonMap(encodedFeatureName, equalTo(valueMap.get(fieldValue))); + Map fieldValues = randomFieldValues(field, fieldValue); + testProcess(encoding, fieldValues, matchers); + + // Test where the value is some unknown Value + fieldValues = randomFieldValues(field, "unknownValue"); + fieldValues.put(field, "unknownValue"); + matchers = Collections.singletonMap(encodedFeatureName, equalTo(0.0)); + testProcess(encoding, fieldValues, matchers); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java new file mode 100644 index 00000000000..f0627719ec4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.preprocessing;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.hamcrest.Matcher;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class OneHotEncodingTests extends PreProcessingTests<OneHotEncoding> {
+
+    @Override
+    protected OneHotEncoding doParseInstance(XContentParser parser) throws IOException {
+        return lenient ? OneHotEncoding.fromXContentLenient(parser) : OneHotEncoding.fromXContentStrict(parser);
+    }
+
+    @Override
+    protected OneHotEncoding createTestInstance() {
+        return createRandom();
+    }
+
+    public static OneHotEncoding createRandom() {
+        int valuesSize = randomIntBetween(1, 10);
+        Map<String, String> valueMap = new HashMap<>();
+        for (int i = 0; i < valuesSize; i++) {
+            valueMap.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
+        }
+        return new OneHotEncoding(randomAlphaOfLength(10), valueMap);
+    }
+
+    @Override
+    protected Writeable.Reader<OneHotEncoding> instanceReader() {
+        return OneHotEncoding::new;
+    }
+
+    public void testProcessWithFieldPresent() {
+        String field = "categorical";
+        List<String> values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote");
+        Map<String, String> valueMap = values.stream().collect(Collectors.toMap(Function.identity(), v -> "Column_" + v));
+        OneHotEncoding encoding = new OneHotEncoding(field, valueMap);
+        String fieldValue = randomFrom(values);
+        Map<String, Object> fieldValues = randomFieldValues(field, fieldValue);
+
+        Map<String, Matcher<? super Object>> matchers = values.stream().map(v -> "Column_" + v)
+            .collect(Collectors.toMap(
+                Function.identity(),
+                v -> v.equals("Column_" + fieldValue) ? equalTo(1) : equalTo(0)));
+
+        fieldValues.put(field, fieldValue);
+        testProcess(encoding, fieldValues, matchers);
+
+        // Test where the value is some unknown Value
+        fieldValues = randomFieldValues(field, "unknownValue");
+        matchers.put("Column_" + fieldValue, equalTo(0));
+        testProcess(encoding, fieldValues, matchers);
+    }
+
+}
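As a rough sketch of what the test above asserts, using an invented `animal` field and column names:

    Map<String, String> hotMap = new HashMap<>();
    hotMap.put("cat", "Column_cat");
    hotMap.put("dog", "Column_dog");
    OneHotEncoding encoding = new OneHotEncoding("animal", hotMap);

    Map<String, Object> fields = new HashMap<>();
    fields.put("animal", "cat");
    encoding.process(fields);  // mutates the map in place
    // fields now also contains "Column_cat" -> 1 and "Column_dog" -> 0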
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java
new file mode 100644
index 00000000000..4301b09c5ec
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.preprocessing;
+
+import org.elasticsearch.test.AbstractSerializingTestCase;
+import org.hamcrest.Matcher;
+import org.junit.Before;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public abstract class PreProcessingTests<T extends PreProcessor> extends AbstractSerializingTestCase<T> {
+
+    protected boolean lenient;
+
+    @Before
+    public void chooseStrictOrLenient() {
+        lenient = randomBoolean();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return lenient;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return field -> !field.isEmpty();
+    }
+
+    void testProcess(PreProcessor preProcessor, Map<String, Object> fieldValues, Map<String, Matcher<? super Object>> assertions) {
+        preProcessor.process(fieldValues);
+        assertions.forEach((fieldName, matcher) ->
+            assertThat(fieldValues.get(fieldName), matcher)
+        );
+    }
+
+    public void testWithMissingField() {
+        Map<String, Object> fields = randomFieldValues();
+        PreProcessor preProcessor = this.createTestInstance();
+        Map<String, Object> fieldsCopy = new HashMap<>(fields);
+        preProcessor.process(fields);
+        assertThat(fieldsCopy, equalTo(fields));
+    }
+
+    Map<String, Object> randomFieldValues() {
+        int numFields = randomIntBetween(1, 5);
+        Map<String, Object> fieldValues = new HashMap<>(numFields);
+        for (int k = 0; k < numFields; k++) {
+            fieldValues.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
+        }
+        return fieldValues;
+    }
+
+    Map<String, Object> randomFieldValues(String categoricalField, String categoricalValue) {
+        Map<String, Object> fieldValues = randomFieldValues();
+        fieldValues.put(categoricalField, categoricalValue);
+        return fieldValues;
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java
new file mode 100644
index 00000000000..d86d9e09f02
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.preprocessing;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.hamcrest.Matcher;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class TargetMeanEncodingTests extends PreProcessingTests<TargetMeanEncoding> {
+
+    @Override
+    protected TargetMeanEncoding doParseInstance(XContentParser parser) throws IOException {
+        return lenient ?
TargetMeanEncoding.fromXContentLenient(parser) : TargetMeanEncoding.fromXContentStrict(parser); + } + + @Override + protected TargetMeanEncoding createTestInstance() { + return createRandom(); + } + + public static TargetMeanEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new TargetMeanEncoding(randomAlphaOfLength(10), + randomAlphaOfLength(10), + valueMap, + randomDoubleBetween(0.0, 1.0, false)); + } + + @Override + protected Writeable.Reader instanceReader() { + return TargetMeanEncoding::new; + } + + public void testProcessWithFieldPresent() { + String field = "categorical"; + List values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote"); + Map valueMap = values.stream().collect(Collectors.toMap(Function.identity(), + v -> randomDoubleBetween(0.0, 1.0, false))); + String encodedFeatureName = "encoded"; + Double defaultvalue = randomDouble(); + TargetMeanEncoding encoding = new TargetMeanEncoding(field, encodedFeatureName, valueMap, defaultvalue); + String fieldValue = randomFrom(values); + Map> matchers = Collections.singletonMap(encodedFeatureName, equalTo(valueMap.get(fieldValue))); + Map fieldValues = randomFieldValues(field, fieldValue); + testProcess(encoding, fieldValues, matchers); + + // Test where the value is some unknown Value + fieldValues = randomFieldValues(field, "unknownValue"); + matchers = Collections.singletonMap(encodedFeatureName, equalTo(defaultvalue)); + testProcess(encoding, fieldValues, matchers); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index bd43879792a..739a4fc8a7e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -122,6 +122,7 @@ import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.core.ml.dataframe.analyses.MlDataFrameAnalysisNamedXContentProvider; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; @@ -950,6 +951,8 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlEvaluationNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers()); + namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); return namedXContent; } + } From 9e17b78fee4c727f29b94b86517a0fb2fed12497 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 25 Sep 2019 14:14:10 +0200 Subject: [PATCH 52/94] Mute second test in monitoring/bulk/10_basic Relates #30101 --- .../resources/rest-api-spec/test/monitoring/bulk/10_basic.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml index ce4751d690d..265ef1f8cc9 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml @@ -171,6 +171,10 @@ --- "Bulk indexing of monitoring data on closed indices should throw an export exception": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/30101" + - do: monitoring.bulk: system_id: "beats" From 83365e94babfc03d9f88337b4d0e7429539ee175 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 25 Sep 2019 14:38:47 +0200 Subject: [PATCH 53/94] [DOCS] Reformat suggesters page. (#47010) --- docs/reference/search/suggesters.asciidoc | 40 ++++++++++++++--------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/docs/reference/search/suggesters.asciidoc b/docs/reference/search/suggesters.asciidoc index e5f715823c6..bf1c35988d8 100644 --- a/docs/reference/search/suggesters.asciidoc +++ b/docs/reference/search/suggesters.asciidoc @@ -1,16 +1,8 @@ [[search-suggesters]] === Suggesters -The suggest feature suggests similar looking terms based on a provided -text by using a suggester. Parts of the suggest feature are still under -development. - -The suggest request part is defined alongside the query part in a `_search` -request. If the query part is left out, only suggestions are returned. - -NOTE: `_suggest` endpoint has been deprecated in favour of using suggest via -`_search` endpoint. In 5.0, the `_search` endpoint has been optimized for -suggest only search requests. +Suggests similar looking terms based on a provided text by using a suggester. +Parts of the suggest feature are still under development. [source,console] -------------------------------------------------- @@ -33,10 +25,27 @@ POST twitter/_search -------------------------------------------------- // TEST[setup:twitter] -Several suggestions can be specified per request. Each suggestion is -identified with an arbitrary name. In the example below two suggestions -are requested. Both `my-suggest-1` and `my-suggest-2` suggestions use -the `term` suggester, but have a different `text`. + +[[search-suggesters-api-request]] +==== {api-request-title} + +The suggest feature suggests similar looking terms based on a provided text by +using a suggester. The suggest request part is defined alongside the query part +in a `_search` request. If the query part is left out, only suggestions are +returned. + +NOTE: `_suggest` endpoint has been deprecated in favour of using suggest via +`_search` endpoint. In 5.0, the `_search` endpoint has been optimized for +suggest only search requests. + + +[[search-suggesters-api-example]] +==== {api-examples-title} + +Several suggestions can be specified per request. Each suggestion is identified +with an arbitrary name. In the example below two suggestions are requested. Both +`my-suggest-1` and `my-suggest-2` suggestions use the `term` suggester, but have +a different `text`. [source,console] -------------------------------------------------- @@ -60,6 +69,7 @@ POST _search -------------------------------------------------- // TEST[setup:twitter] + The below suggest response example includes the suggestion response for `my-suggest-1` and `my-suggest-2`. Each suggestion part contains entries. 
Each entry is effectively a token from the suggest text and @@ -107,7 +117,7 @@ term suggester's score is based on the edit distance. [float] [[global-suggest]] -==== Global suggest text +===== Global suggest text To avoid repetition of the suggest text, it is possible to define a global text. In the example below the suggest text is defined globally From c4a166fc9a6fb467ea3048516dba17bfde2e548e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 25 Sep 2019 14:53:11 +0200 Subject: [PATCH 54/94] Simplify SnapshotResiliencyTests (#46961) (#47108) Simplify `SnapshotResiliencyTests` to more closely match the structure of `AbstractCoordinatorTestCase` and allow for future drying up between the two classes: * Make the test cluster nodes a nested-class in the test cluster itself * Remove the needless custom network disruption implementation and simply track disconnected node ids like `AbstractCoordinatorTestCase` does --- .../snapshots/SnapshotResiliencyTests.java | 732 +++++++++--------- 1 file changed, 353 insertions(+), 379 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 10c2188d478..d8a8be807cf 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -165,7 +165,6 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.snapshots.mockstore.MockEventuallyConsistentRepository; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.disruption.DisruptableMockTransport; -import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportInterceptor; @@ -240,7 +239,7 @@ public class SnapshotResiliencyTests extends ESTestCase { (BlobStoreRepository) testClusterNodes.randomMasterNodeSafe().repositoriesService.repository("repo"), Runnable::run); } finally { - testClusterNodes.nodes.values().forEach(TestClusterNode::stop); + testClusterNodes.nodes.values().forEach(TestClusterNodes.TestClusterNode::stop); } } @@ -253,7 +252,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); final int documents = randomIntBetween(0, 100); - final TestClusterNode masterNode = + final TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseListener = new StepListener<>(); @@ -326,7 +325,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final String index = "test"; final int shards = randomIntBetween(1, 10); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -363,7 +362,7 @@ public class SnapshotResiliencyTests extends ESTestCase { clearDisruptionsAndAwaitSync(); - final TestClusterNode randomMaster = testClusterNodes.randomMasterNode() + final TestClusterNodes.TestClusterNode randomMaster = testClusterNodes.randomMasterNode() .orElseThrow(() -> new AssertionError("expected to find at least one active master node")); SnapshotsInProgress finalSnapshotsInProgress = 
randomMaster.clusterService.state().custom(SnapshotsInProgress.TYPE); assertThat(finalSnapshotsInProgress.entries(), empty()); @@ -380,7 +379,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final String index = "test"; final int shards = randomIntBetween(1, 10); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -431,7 +430,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); - final TestClusterNode masterNode = + final TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final AtomicBoolean createdSnapshot = new AtomicBoolean(); final AdminClient masterAdminClient = masterNode.client.admin(); @@ -443,8 +442,8 @@ public class SnapshotResiliencyTests extends ESTestCase { continueOrDie(clusterStateResponseStepListener, clusterStateResponse -> { final ShardRouting shardToRelocate = clusterStateResponse.getState().routingTable().allShards(index).get(0); - final TestClusterNode currentPrimaryNode = testClusterNodes.nodeById(shardToRelocate.currentNodeId()); - final TestClusterNode otherNode = testClusterNodes.randomDataNodeSafe(currentPrimaryNode.node.getName()); + final TestClusterNodes.TestClusterNode currentPrimaryNode = testClusterNodes.nodeById(shardToRelocate.currentNodeId()); + final TestClusterNodes.TestClusterNode otherNode = testClusterNodes.randomDataNodeSafe(currentPrimaryNode.node.getName()); scheduleNow(() -> testClusterNodes.stopNode(currentPrimaryNode)); scheduleNow(new Runnable() { @Override @@ -504,7 +503,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); final int documents = randomIntBetween(2, 100); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -574,7 +573,8 @@ public class SnapshotResiliencyTests extends ESTestCase { assertEquals(0, snapshotInfo.failedShards()); } - private StepListener createRepoAndIndex(TestClusterNode masterNode, String repoName, String index, int shards) { + private StepListener createRepoAndIndex(TestClusterNodes.TestClusterNode masterNode, String repoName, + String index, int shards) { final AdminClient adminClient = masterNode.client.admin(); final StepListener createRepositoryListener = new StepListener<>(); @@ -604,7 +604,7 @@ public class SnapshotResiliencyTests extends ESTestCase { if (randomBoolean()) { disconnectRandomDataNode(); } else { - testClusterNodes.randomDataNode().ifPresent(TestClusterNode::restart); + testClusterNodes.randomDataNode().ifPresent(TestClusterNodes.TestClusterNode::restart); } } @@ -712,7 +712,10 @@ public class SnapshotResiliencyTests extends ESTestCase { // LinkedHashMap so we have deterministic ordering when iterating over the map in tests private final Map nodes = new LinkedHashMap<>(); - private final DisconnectedNodes disruptedLinks = new DisconnectedNodes(); + /** + * Node ids that are disconnected from all other nodes. 
+ */ + private final Set disconnectedNodes = new HashSet<>(); TestClusterNodes(int masterNodes, int dataNodes) { for (int i = 0; i < masterNodes; ++i) { @@ -751,7 +754,7 @@ public class SnapshotResiliencyTests extends ESTestCase { private TestClusterNode newNode(String nodeName, DiscoveryNodeRole role) throws IOException { return new TestClusterNode( new DiscoveryNode(nodeName, randomAlphaOfLength(10), buildNewFakeTransportAddress(), emptyMap(), - Collections.singleton(role), Version.CURRENT), this::getDisruption); + Collections.singleton(role), Version.CURRENT)); } public TestClusterNode randomMasterNodeSafe() { @@ -790,16 +793,16 @@ public class SnapshotResiliencyTests extends ESTestCase { } public void disconnectNode(TestClusterNode node) { - if (disruptedLinks.disconnected.contains(node.node.getName())) { + if (disconnectedNodes.contains(node.node.getId())) { return; } testClusterNodes.nodes.values().forEach(n -> n.transportService.getConnectionManager().disconnectFromNode(node.node)); - disruptedLinks.disconnect(node.node.getName()); + disconnectedNodes.add(node.node.getId()); } public void clearNetworkDisruptions() { - final Set disconnectedNodes = new HashSet<>(disruptedLinks.disconnected); - disruptedLinks.clear(); + final Set disconnectedNodes = new HashSet<>(this.disconnectedNodes); + this.disconnectedNodes.clear(); disconnectedNodes.forEach(nodeName -> { if (testClusterNodes.nodes.containsKey(nodeName)) { final DiscoveryNode node = testClusterNodes.nodes.get(nodeName).node; @@ -808,10 +811,6 @@ public class SnapshotResiliencyTests extends ESTestCase { }); } - private NetworkDisruption.DisruptedLinks getDisruption() { - return disruptedLinks; - } - /** * Builds a {@link DiscoveryNodes} instance that holds the nodes in this test cluster. * @return DiscoveryNodes @@ -833,209 +832,185 @@ public class SnapshotResiliencyTests extends ESTestCase { assertTrue(master.node.isMasterNode()); return master; } - } - private final class TestClusterNode { + private final class TestClusterNode { - private final Logger logger = LogManager.getLogger(TestClusterNode.class); + private final Logger logger = LogManager.getLogger(TestClusterNode.class); - private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Stream.concat( - ClusterModule.getNamedWriteables().stream(), NetworkModule.getNamedWriteables().stream()).collect(Collectors.toList())); + private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Stream.concat( + ClusterModule.getNamedWriteables().stream(), NetworkModule.getNamedWriteables().stream()).collect(Collectors.toList())); - private final TransportService transportService; + private final TransportService transportService; - private final ClusterService clusterService; + private final ClusterService clusterService; - private final RepositoriesService repositoriesService; + private final RepositoriesService repositoriesService; - private final SnapshotsService snapshotsService; + private final SnapshotsService snapshotsService; - private final SnapshotShardsService snapshotShardsService; + private final SnapshotShardsService snapshotShardsService; - private final IndicesService indicesService; + private final IndicesService indicesService; - private final IndicesClusterStateService indicesClusterStateService; + private final IndicesClusterStateService indicesClusterStateService; - private final DiscoveryNode node; + private final DiscoveryNode node; - private final MasterService masterService; + private final MasterService 
masterService; - private final AllocationService allocationService; + private final AllocationService allocationService; - private final NodeClient client; + private final NodeClient client; - private final NodeEnvironment nodeEnv; + private final NodeEnvironment nodeEnv; - private final DisruptableMockTransport mockTransport; + private final DisruptableMockTransport mockTransport; - private final ThreadPool threadPool; + private final ThreadPool threadPool; - private final Supplier disruption; + private Coordinator coordinator; - private Coordinator coordinator; - - TestClusterNode(DiscoveryNode node, Supplier disruption) throws IOException { - this.disruption = disruption; - this.node = node; - final Environment environment = createEnvironment(node.getName()); - masterService = new FakeThreadPoolMasterService(node.getName(), "test", deterministicTaskQueue::scheduleNow); - final Settings settings = environment.settings(); - final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - threadPool = deterministicTaskQueue.getThreadPool(); - clusterService = new ClusterService(settings, clusterSettings, masterService, - new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { - @Override - protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { - return new MockSinglePrioritizingExecutor(node.getName(), deterministicTaskQueue); - } - - @Override - protected void connectToNodesAndWait(ClusterState newClusterState) { - // don't do anything, and don't block - } - }); - mockTransport = new DisruptableMockTransport(node, logger) { - @Override - protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { - return disruption.get().disrupt(node.getName(), destination.getName()) - ? 
ConnectionStatus.DISCONNECTED : ConnectionStatus.CONNECTED; - } - - @Override - protected Optional getDisruptableMockTransport(TransportAddress address) { - return testClusterNodes.nodes.values().stream().map(cn -> cn.mockTransport) - .filter(transport -> transport.getLocalNode().getAddress().equals(address)) - .findAny(); - } - - @Override - protected void execute(Runnable runnable) { - scheduleNow(CoordinatorTests.onNodeLog(getLocalNode(), runnable)); - } - - @Override - protected NamedWriteableRegistry writeableRegistry() { - return namedWriteableRegistry; - } - }; - transportService = mockTransport.createTransportService( - settings, deterministicTaskQueue.getThreadPool(runnable -> CoordinatorTests.onNodeLog(node, runnable)), - new TransportInterceptor() { - @Override - public TransportRequestHandler interceptHandler(String action, String executor, - boolean forceExecution, TransportRequestHandler actualHandler) { - // TODO: Remove this hack once recoveries are async and can be used in these tests - if (action.startsWith("internal:index/shard/recovery")) { - return (request, channel, task) -> scheduleSoon( - new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - channel.sendResponse(new TransportException(new IOException("failed to recover shard"))); - } - - @Override - public void onFailure(final Exception e) { - throw new AssertionError(e); - } - }); - } else { - return actualHandler; + TestClusterNode(DiscoveryNode node) throws IOException { + this.node = node; + final Environment environment = createEnvironment(node.getName()); + masterService = new FakeThreadPoolMasterService(node.getName(), "test", deterministicTaskQueue::scheduleNow); + final Settings settings = environment.settings(); + final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool = deterministicTaskQueue.getThreadPool(); + clusterService = new ClusterService(settings, clusterSettings, masterService, + new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { + @Override + protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { + return new MockSinglePrioritizingExecutor(node.getName(), deterministicTaskQueue); } + + @Override + protected void connectToNodesAndWait(ClusterState newClusterState) { + // don't do anything, and don't block + } + }); + mockTransport = new DisruptableMockTransport(node, logger) { + @Override + protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { + if (node.equals(destination)) { + return ConnectionStatus.CONNECTED; + } + // Check if both nodes are still part of the cluster + if (nodes.containsKey(node.getName()) == false || nodes.containsKey(destination.getName()) == false) { + return ConnectionStatus.DISCONNECTED; + } + return disconnectedNodes.contains(node.getId()) || disconnectedNodes.contains(destination.getId()) + ? 
ConnectionStatus.DISCONNECTED : ConnectionStatus.CONNECTED; } - }, - a -> node, null, emptySet() - ); - final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); - repositoriesService = new RepositoriesService( - settings, clusterService, transportService, - Collections.singletonMap(FsRepository.TYPE, getRepoFactory(environment)), emptyMap(), threadPool - ); - snapshotsService = - new SnapshotsService(settings, clusterService, indexNameExpressionResolver, repositoriesService, threadPool); - nodeEnv = new NodeEnvironment(settings, environment); - final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); - final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); - client = new NodeClient(settings, threadPool); - allocationService = ESAllocationTestCase.createAllocationService(settings); - final IndexScopedSettings indexScopedSettings = - new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); - final BigArrays bigArrays = new BigArrays(new PageCacheRecycler(settings), null, "test"); - final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); - indicesService = new IndicesService( - settings, - mock(PluginsService.class), - nodeEnv, - namedXContentRegistry, - new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), - emptyMap(), emptyMap(), emptyMap(), emptyMap()), - indexNameExpressionResolver, - mapperRegistry, - namedWriteableRegistry, - threadPool, - indexScopedSettings, - new NoneCircuitBreakerService(), - bigArrays, - scriptService, - clusterService, - client, - new MetaStateService(nodeEnv, namedXContentRegistry), - Collections.emptyList(), - emptyMap() - ); - final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); - final ActionFilters actionFilters = new ActionFilters(emptySet()); - snapshotShardsService = new SnapshotShardsService( - settings, clusterService, repositoriesService, threadPool, - transportService, indicesService, actionFilters, indexNameExpressionResolver); - final ShardStateAction shardStateAction = new ShardStateAction( - clusterService, transportService, allocationService, - new BatchedRerouteService(clusterService, allocationService::reroute), - threadPool - ); - final MetaDataMappingService metaDataMappingService = new MetaDataMappingService(clusterService, indicesService); - indicesClusterStateService = new IndicesClusterStateService( - settings, - indicesService, - clusterService, - threadPool, - new PeerRecoveryTargetService(threadPool, transportService, recoverySettings, clusterService), - shardStateAction, - new NodeMappingRefreshAction(transportService, metaDataMappingService), - repositoriesService, - mock(SearchService.class), - new SyncedFlushService(indicesService, clusterService, transportService, indexNameExpressionResolver), - new PeerRecoverySourceService(transportService, indicesService, recoverySettings), - snapshotShardsService, - new PrimaryReplicaSyncer( - transportService, - new TransportResyncReplicationAction( - settings, + + @Override + protected Optional getDisruptableMockTransport(TransportAddress address) { + return nodes.values().stream().map(cn -> cn.mockTransport) + .filter(transport -> transport.getLocalNode().getAddress().equals(address)) + .findAny(); + } + + @Override + protected void execute(Runnable runnable) { + scheduleNow(CoordinatorTests.onNodeLog(getLocalNode(), 
runnable)); + } + + @Override + protected NamedWriteableRegistry writeableRegistry() { + return namedWriteableRegistry; + } + }; + transportService = mockTransport.createTransportService( + settings, deterministicTaskQueue.getThreadPool(runnable -> CoordinatorTests.onNodeLog(node, runnable)), + new TransportInterceptor() { + @Override + public TransportRequestHandler interceptHandler(String action, String executor, + boolean forceExecution, TransportRequestHandler actualHandler) { + // TODO: Remove this hack once recoveries are async and can be used in these tests + if (action.startsWith("internal:index/shard/recovery")) { + return (request, channel, task) -> scheduleSoon( + new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + channel.sendResponse(new TransportException(new IOException("failed to recover shard"))); + } + + @Override + public void onFailure(final Exception e) { + throw new AssertionError(e); + } + }); + } else { + return actualHandler; + } + } + }, + a -> node, null, emptySet() + ); + final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + repositoriesService = new RepositoriesService( + settings, clusterService, transportService, + Collections.singletonMap(FsRepository.TYPE, getRepoFactory(environment)), emptyMap(), threadPool + ); + snapshotsService = + new SnapshotsService(settings, clusterService, indexNameExpressionResolver, repositoriesService, threadPool); + nodeEnv = new NodeEnvironment(settings, environment); + final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); + final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); + client = new NodeClient(settings, threadPool); + allocationService = ESAllocationTestCase.createAllocationService(settings); + final IndexScopedSettings indexScopedSettings = + new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + final BigArrays bigArrays = new BigArrays(new PageCacheRecycler(settings), null, "test"); + final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); + indicesService = new IndicesService( + settings, + mock(PluginsService.class), + nodeEnv, + namedXContentRegistry, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), + emptyMap(), emptyMap(), emptyMap(), emptyMap()), + indexNameExpressionResolver, + mapperRegistry, + namedWriteableRegistry, + threadPool, + indexScopedSettings, + new NoneCircuitBreakerService(), + bigArrays, + scriptService, + clusterService, + client, + new MetaStateService(nodeEnv, namedXContentRegistry), + Collections.emptyList(), + emptyMap() + ); + final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); + final ActionFilters actionFilters = new ActionFilters(emptySet()); + snapshotShardsService = new SnapshotShardsService( + settings, clusterService, repositoriesService, threadPool, + transportService, indicesService, actionFilters, indexNameExpressionResolver); + final ShardStateAction shardStateAction = new ShardStateAction( + clusterService, transportService, allocationService, + new BatchedRerouteService(clusterService, allocationService::reroute), + threadPool + ); + final MetaDataMappingService metaDataMappingService = new MetaDataMappingService(clusterService, indicesService); + indicesClusterStateService = new IndicesClusterStateService( + settings, + indicesService, + clusterService, + 
threadPool, + new PeerRecoveryTargetService(threadPool, transportService, recoverySettings, clusterService), + shardStateAction, + new NodeMappingRefreshAction(transportService, metaDataMappingService), + repositoriesService, + mock(SearchService.class), + new SyncedFlushService(indicesService, clusterService, transportService, indexNameExpressionResolver), + new PeerRecoverySourceService(transportService, indicesService, recoverySettings), + snapshotShardsService, + new PrimaryReplicaSyncer( transportService, - clusterService, - indicesService, - threadPool, - shardStateAction, - actionFilters, - indexNameExpressionResolver)), - new GlobalCheckpointSyncAction( - settings, - transportService, - clusterService, - indicesService, - threadPool, - shardStateAction, - actionFilters, - indexNameExpressionResolver), - new RetentionLeaseSyncAction( - settings, - transportService, - clusterService, - indicesService, - threadPool, - shardStateAction, - actionFilters, - indexNameExpressionResolver), - new RetentionLeaseBackgroundSyncAction( + new TransportResyncReplicationAction( settings, transportService, clusterService, @@ -1043,196 +1018,195 @@ public class SnapshotResiliencyTests extends ESTestCase { threadPool, shardStateAction, actionFilters, - indexNameExpressionResolver)); + indexNameExpressionResolver)), + new GlobalCheckpointSyncAction( + settings, + transportService, + clusterService, + indicesService, + threadPool, + shardStateAction, + actionFilters, + indexNameExpressionResolver), + new RetentionLeaseSyncAction( + settings, + transportService, + clusterService, + indicesService, + threadPool, + shardStateAction, + actionFilters, + indexNameExpressionResolver), + new RetentionLeaseBackgroundSyncAction( + settings, + transportService, + clusterService, + indicesService, + threadPool, + shardStateAction, + actionFilters, + indexNameExpressionResolver)); Map actions = new HashMap<>(); - final MetaDataCreateIndexService metaDataCreateIndexService = new MetaDataCreateIndexService(settings, clusterService, - indicesService, - allocationService, new AliasValidator(), environment, indexScopedSettings, - threadPool, namedXContentRegistry, false); - actions.put(CreateIndexAction.INSTANCE, - new TransportCreateIndexAction( - transportService, clusterService, threadPool, - metaDataCreateIndexService, - actionFilters, indexNameExpressionResolver - )); - final MappingUpdatedAction mappingUpdatedAction = new MappingUpdatedAction(settings, clusterSettings); - mappingUpdatedAction.setClient(client); + final MetaDataCreateIndexService metaDataCreateIndexService = new MetaDataCreateIndexService(settings, clusterService, + indicesService, + allocationService, new AliasValidator(), environment, indexScopedSettings, + threadPool, namedXContentRegistry, false); + actions.put(CreateIndexAction.INSTANCE, + new TransportCreateIndexAction( + transportService, clusterService, threadPool, + metaDataCreateIndexService, + actionFilters, indexNameExpressionResolver + )); + final MappingUpdatedAction mappingUpdatedAction = new MappingUpdatedAction(settings, clusterSettings); + mappingUpdatedAction.setClient(client); final TransportShardBulkAction transportShardBulkAction = new TransportShardBulkAction(settings, transportService, clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, new UpdateHelper(scriptService), actionFilters, indexNameExpressionResolver); - actions.put(BulkAction.INSTANCE, - new TransportBulkAction(threadPool, transportService, clusterService, - new IngestService( - 
clusterService, threadPool, environment, scriptService, - new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), - Collections.emptyList(), client), + actions.put(BulkAction.INSTANCE, + new TransportBulkAction(threadPool, transportService, clusterService, + new IngestService( + clusterService, threadPool, environment, scriptService, + new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), + Collections.emptyList(), client), transportShardBulkAction, client, actionFilters, indexNameExpressionResolver, - new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver) - )); - final RestoreService restoreService = new RestoreService( - clusterService, repositoriesService, allocationService, - metaDataCreateIndexService, - new MetaDataIndexUpgradeService( - settings, namedXContentRegistry, - mapperRegistry, - indexScopedSettings, - Collections.emptyList() - ), - clusterSettings - ); - actions.put(PutMappingAction.INSTANCE, - new TransportPutMappingAction(transportService, clusterService, threadPool, metaDataMappingService, - actionFilters, indexNameExpressionResolver, new RequestValidators<>(Collections.emptyList()))); - final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); - final SearchTransportService searchTransportService = new SearchTransportService(transportService, - SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); - final SearchService searchService = new SearchService(clusterService, indicesService, threadPool, scriptService, - bigArrays, new FetchPhase(Collections.emptyList()), responseCollectorService); - actions.put(SearchAction.INSTANCE, - new TransportSearchAction(threadPool, transportService, searchService, - searchTransportService, new SearchPhaseController(searchService::createReduceContext), clusterService, - actionFilters, indexNameExpressionResolver)); - actions.put(RestoreSnapshotAction.INSTANCE, - new TransportRestoreSnapshotAction(transportService, clusterService, threadPool, restoreService, actionFilters, - indexNameExpressionResolver)); - actions.put(DeleteIndexAction.INSTANCE, - new TransportDeleteIndexAction( - transportService, clusterService, threadPool, - new MetaDataDeleteIndexService(settings, clusterService, allocationService), actionFilters, - indexNameExpressionResolver, new DestructiveOperations(settings, clusterSettings))); - actions.put(PutRepositoryAction.INSTANCE, - new TransportPutRepositoryAction( - transportService, clusterService, repositoriesService, threadPool, - actionFilters, indexNameExpressionResolver - )); - actions.put(CreateSnapshotAction.INSTANCE, - new TransportCreateSnapshotAction( - transportService, clusterService, threadPool, - snapshotsService, actionFilters, indexNameExpressionResolver - )); - actions.put(ClusterRerouteAction.INSTANCE, - new TransportClusterRerouteAction(transportService, clusterService, threadPool, allocationService, - actionFilters, indexNameExpressionResolver)); - actions.put(ClusterStateAction.INSTANCE, - new TransportClusterStateAction(transportService, clusterService, threadPool, - actionFilters, indexNameExpressionResolver)); - actions.put(IndicesShardStoresAction.INSTANCE, - new TransportIndicesShardStoresAction( - transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, - new TransportNodesListGatewayStartedShards(settings, + new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver) + )); + final RestoreService restoreService = 
new RestoreService( + clusterService, repositoriesService, allocationService, + metaDataCreateIndexService, + new MetaDataIndexUpgradeService( + settings, namedXContentRegistry, + mapperRegistry, + indexScopedSettings, + Collections.emptyList() + ), + clusterSettings + ); + actions.put(PutMappingAction.INSTANCE, + new TransportPutMappingAction(transportService, clusterService, threadPool, metaDataMappingService, + actionFilters, indexNameExpressionResolver, new RequestValidators<>(Collections.emptyList()))); + final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); + final SearchTransportService searchTransportService = new SearchTransportService(transportService, + SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); + final SearchService searchService = new SearchService(clusterService, indicesService, threadPool, scriptService, + bigArrays, new FetchPhase(Collections.emptyList()), responseCollectorService); + actions.put(SearchAction.INSTANCE, + new TransportSearchAction(threadPool, transportService, searchService, + searchTransportService, new SearchPhaseController(searchService::createReduceContext), clusterService, + actionFilters, indexNameExpressionResolver)); + actions.put(RestoreSnapshotAction.INSTANCE, + new TransportRestoreSnapshotAction(transportService, clusterService, threadPool, restoreService, actionFilters, + indexNameExpressionResolver)); + actions.put(DeleteIndexAction.INSTANCE, + new TransportDeleteIndexAction( + transportService, clusterService, threadPool, + new MetaDataDeleteIndexService(settings, clusterService, allocationService), actionFilters, + indexNameExpressionResolver, new DestructiveOperations(settings, clusterSettings))); + actions.put(PutRepositoryAction.INSTANCE, + new TransportPutRepositoryAction( + transportService, clusterService, repositoriesService, threadPool, + actionFilters, indexNameExpressionResolver + )); + actions.put(CreateSnapshotAction.INSTANCE, + new TransportCreateSnapshotAction( + transportService, clusterService, threadPool, + snapshotsService, actionFilters, indexNameExpressionResolver + )); + actions.put(ClusterRerouteAction.INSTANCE, + new TransportClusterRerouteAction(transportService, clusterService, threadPool, allocationService, + actionFilters, indexNameExpressionResolver)); + actions.put(ClusterStateAction.INSTANCE, + new TransportClusterStateAction(transportService, clusterService, threadPool, + actionFilters, indexNameExpressionResolver)); + actions.put(IndicesShardStoresAction.INSTANCE, + new TransportIndicesShardStoresAction( + transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, + new TransportNodesListGatewayStartedShards(settings, threadPool, clusterService, transportService, actionFilters, nodeEnv, indicesService, namedXContentRegistry)) ); - actions.put(DeleteSnapshotAction.INSTANCE, - new TransportDeleteSnapshotAction( - transportService, clusterService, threadPool, - snapshotsService, actionFilters, indexNameExpressionResolver - )); - client.initialize(actions, () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); - } + actions.put(DeleteSnapshotAction.INSTANCE, + new TransportDeleteSnapshotAction( + transportService, clusterService, threadPool, + snapshotsService, actionFilters, indexNameExpressionResolver + )); + client.initialize(actions, () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); + } - private Repository.Factory 
getRepoFactory(Environment environment) { - // Run half the tests with the eventually consistent repository - if (blobStoreContext == null) { - return metaData -> { - final Repository repository = new FsRepository(metaData, environment, xContentRegistry(), threadPool) { - @Override - protected void assertSnapshotOrGenericThread() { - // eliminate thread name check as we create repo in the test thread - } + private Repository.Factory getRepoFactory(Environment environment) { + // Run half the tests with the eventually consistent repository + if (blobStoreContext == null) { + return metaData -> { + final Repository repository = new FsRepository(metaData, environment, xContentRegistry(), threadPool) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo in the test thread + } + }; + repository.start(); + return repository; + }; + } else { + return metaData -> { + final Repository repository = new MockEventuallyConsistentRepository( + metaData, xContentRegistry(), deterministicTaskQueue.getThreadPool(), blobStoreContext); + repository.start(); + return repository; }; - repository.start(); - return repository; - }; - } else { - return metaData -> { - final Repository repository = new MockEventuallyConsistentRepository( - metaData, xContentRegistry(), deterministicTaskQueue.getThreadPool(), blobStoreContext); - repository.start(); - return repository; - }; - } - } - public void restart() { - testClusterNodes.disconnectNode(this); - final ClusterState oldState = this.clusterService.state(); - stop(); - testClusterNodes.nodes.remove(node.getName()); - scheduleSoon(() -> { - try { - final TestClusterNode restartedNode = new TestClusterNode( - new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), emptyMap(), - node.getRoles(), Version.CURRENT), disruption); - testClusterNodes.nodes.put(node.getName(), restartedNode); - restartedNode.start(oldState); - } catch (IOException e) { - throw new AssertionError(e); } - }); - } - - public void stop() { - testClusterNodes.disconnectNode(this); - indicesService.close(); - clusterService.close(); - indicesClusterStateService.close(); - if (coordinator != null) { - coordinator.close(); } - nodeEnv.close(); - } - - public void start(ClusterState initialState) { - transportService.start(); - transportService.acceptIncomingRequests(); - snapshotsService.start(); - snapshotShardsService.start(); - final CoordinationState.PersistedState persistedState = - new InMemoryPersistedState(initialState.term(), stateForNode(initialState, node)); - coordinator = new Coordinator(node.getName(), clusterService.getSettings(), - clusterService.getClusterSettings(), transportService, namedWriteableRegistry, - allocationService, masterService, () -> persistedState, - hostsResolver -> testClusterNodes.nodes.values().stream().filter(n -> n.node.isMasterNode()) - .map(n -> n.node.getAddress()).collect(Collectors.toList()), - clusterService.getClusterApplierService(), Collections.emptyList(), random(), - new BatchedRerouteService(clusterService, allocationService::reroute), ElectionStrategy.DEFAULT_INSTANCE); - masterService.setClusterStatePublisher(coordinator); - coordinator.start(); - masterService.start(); - clusterService.getClusterApplierService().setNodeConnectionsService( - new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService)); - clusterService.getClusterApplierService().start(); - indicesService.start(); - indicesClusterStateService.start(); - 
coordinator.startInitialJoin(); - } - } - - private final class DisconnectedNodes extends NetworkDisruption.DisruptedLinks { - - /** - * Node names that are disconnected from all other nodes. - */ - private final Set disconnected = new HashSet<>(); - - @Override - public boolean disrupt(String node1, String node2) { - if (node1.equals(node2)) { - return false; + public void restart() { + testClusterNodes.disconnectNode(this); + final ClusterState oldState = this.clusterService.state(); + stop(); + nodes.remove(node.getName()); + scheduleSoon(() -> { + try { + final TestClusterNode restartedNode = new TestClusterNode( + new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), emptyMap(), + node.getRoles(), Version.CURRENT)); + nodes.put(node.getName(), restartedNode); + restartedNode.start(oldState); + } catch (IOException e) { + throw new AssertionError(e); + } + }); } - // Check if both nodes are still part of the cluster - if (testClusterNodes.nodes.containsKey(node1) == false - || testClusterNodes.nodes.containsKey(node2) == false) { - return true; + + public void stop() { + testClusterNodes.disconnectNode(this); + indicesService.close(); + clusterService.close(); + indicesClusterStateService.close(); + if (coordinator != null) { + coordinator.close(); + } + nodeEnv.close(); } - return disconnected.contains(node1) || disconnected.contains(node2); - } - public void disconnect(String node) { - disconnected.add(node); - } - - public void clear() { - disconnected.clear(); + public void start(ClusterState initialState) { + transportService.start(); + transportService.acceptIncomingRequests(); + snapshotsService.start(); + snapshotShardsService.start(); + final CoordinationState.PersistedState persistedState = + new InMemoryPersistedState(initialState.term(), stateForNode(initialState, node)); + coordinator = new Coordinator(node.getName(), clusterService.getSettings(), + clusterService.getClusterSettings(), transportService, namedWriteableRegistry, + allocationService, masterService, () -> persistedState, + hostsResolver -> nodes.values().stream().filter(n -> n.node.isMasterNode()) + .map(n -> n.node.getAddress()).collect(Collectors.toList()), + clusterService.getClusterApplierService(), Collections.emptyList(), random(), + new BatchedRerouteService(clusterService, allocationService::reroute), ElectionStrategy.DEFAULT_INSTANCE); + masterService.setClusterStatePublisher(coordinator); + coordinator.start(); + masterService.start(); + clusterService.getClusterApplierService().setNodeConnectionsService( + new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService)); + clusterService.getClusterApplierService().start(); + indicesService.start(); + indicesClusterStateService.start(); + coordinator.startInitialJoin(); + } } } } From 22575bd7e692c2d79d98ca20f5e647fd4e88c591 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 25 Sep 2019 08:25:14 -0400 Subject: [PATCH 55/94] Remove isRecovering method from Engine (#47039) We already prevent flushing in Engine if it's recovering. Hence, we can remove the protection in IndexShard. 
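The guard itself is simple to picture: the engine tracks whether translog
recovery is still pending and refuses to flush until it has finished, so
callers such as IndexShard no longer need a separate isRecovering() pre-check.
A minimal sketch of the pattern (a hypothetical FlushGuardSketch class for
illustration only, not the actual Elasticsearch source; the field and method
names mirror the ones in the diff below):

    import java.util.concurrent.atomic.AtomicBoolean;

    class FlushGuardSketch {
        // Set on construction; cleared once translog recovery completes
        // (or is skipped explicitly, mirroring skipTranslogRecovery()).
        private final AtomicBoolean pendingTranslogRecovery = new AtomicBoolean(true);

        void ensureCanFlush() {
            // Flushing before translog recovery finishes could produce a
            // Lucene commit missing operations still held in the translog.
            if (pendingTranslogRecovery.get()) {
                throw new IllegalStateException("recovery hasn't finished yet");
            }
        }

        void flush() {
            ensureCanFlush(); // every flush path funnels through one guard
            // ... write the Lucene commit and trim the translog ...
        }

        void recoverFromTranslog() {
            // ... replay translog operations into the engine ...
            pendingTranslogRecovery.set(false); // flushing is safe from here on
        }
    }

With the guard living inside the engine, the shard-level flush paths can call
getEngine().flush(...) directly, as the diff below shows.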
--- .../org/elasticsearch/index/engine/Engine.java | 7 ------- .../index/engine/InternalEngine.java | 7 +------ .../elasticsearch/index/shard/IndexShard.java | 16 ++-------------- .../index/engine/InternalEngineTests.java | 14 +++++++++----- 4 files changed, 12 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index f26e5b8ad1f..f9367be4729 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1874,13 +1874,6 @@ public abstract class Engine implements Closeable { */ public abstract void skipTranslogRecovery(); - /** - * Returns true iff this engine is currently recovering from translog. - */ - public boolean isRecovering() { - return false; - } - /** * Tries to prune buffered deletes from the version map. */ diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 4324adbb738..ff2c8f3be89 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2461,7 +2461,7 @@ public class InternalEngine extends Engine { } } - private void ensureCanFlush() { + final void ensureCanFlush() { // translog recover happens after the engine is fully constructed // if we are in this stage we have to prevent flushes from this // engine otherwise we might loose documents if the flush succeeds @@ -2659,11 +2659,6 @@ public class InternalEngine extends Engine { } } - @Override - public boolean isRecovering() { - return pendingTranslogRecovery.get(); - } - /** * Gets the commit data from {@link IndexWriter} as a map. */ diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5e7009c2973..f34f40833d2 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1054,12 +1054,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.SyncedFlushResult syncFlush(String syncId, Engine.CommitId expectedCommitId) { verifyNotClosed(); logger.trace("trying to sync flush. sync id [{}]. expected commit id [{}]]", syncId, expectedCommitId); - Engine engine = getEngine(); - if (engine.isRecovering()) { - throw new IllegalIndexShardStateException(shardId(), state, "syncFlush is only allowed if the engine is not recovery" + - " from translog"); - } - return engine.syncFlush(syncId, expectedCommitId); + return getEngine().syncFlush(syncId, expectedCommitId); } /** @@ -1078,15 +1073,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * since we use Engine#writeIndexingBuffer for this now. 
*/ verifyNotClosed(); - final Engine engine = getEngine(); - if (engine.isRecovering()) { - throw new IllegalIndexShardStateException( - shardId(), - state, - "flush is only allowed if the engine is not recovery from translog"); - } final long time = System.nanoTime(); - final Engine.CommitId commitId = engine.flush(force, waitIfOngoing); + final Engine.CommitId commitId = getEngine().flush(force, waitIfOngoing); flushMetric.inc(System.nanoTime() - time); return commitId; } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index f91f6ee0d8c..da344c53814 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -729,16 +729,20 @@ public class InternalEngineTests extends EngineTestCase { } public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { - assertFalse(engine.isRecovering()); + engine.ensureCanFlush(); // recovered already ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); engine.close(); engine = new InternalEngine(engine.config()); + expectThrows(IllegalStateException.class, engine::ensureCanFlush); expectThrows(IllegalStateException.class, () -> engine.flush(true, true)); - assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); - assertFalse(engine.isRecovering()); + if (randomBoolean()) { + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); + } else { + engine.skipTranslogRecovery(); + } + engine.ensureCanFlush(); // ready doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); engine.flush(); @@ -2825,7 +2829,7 @@ public class InternalEngineTests extends EngineTestCase { { for (int i = 0; i < 2; i++) { try (InternalEngine engine = new InternalEngine(config)) { - assertTrue(engine.isRecovering()); + expectThrows(IllegalStateException.class, engine::ensureCanFlush); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); if (i == 0) { assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); From 74fd21f0b0b694ad18e8a719fb6ea759f86a28ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 25 Sep 2019 14:55:09 +0200 Subject: [PATCH 56/94] [DOCS] Reformats ranking evaluation API (#46974) * [DOCS] Reformats ranking evaluation API. Co-Authored-By: James Rodewig --- docs/reference/search/rank-eval.asciidoc | 205 ++++++++++++++++------- 1 file changed, 147 insertions(+), 58 deletions(-) diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc index 9f5dd0bcb9e..e6fbb51bcbc 100644 --- a/docs/reference/search/rank-eval.asciidoc +++ b/docs/reference/search/rank-eval.asciidoc @@ -3,35 +3,87 @@ experimental["The ranking evaluation API is experimental and may be changed or removed completely in a future release, as well as change in non-backwards compatible ways on minor versions updates. Elastic will take a best effort approach to fix any issues, but experimental features are not subject to the support SLA of official GA features."] -The ranking evaluation API allows to evaluate the quality of ranked search +Allows you to evaluate the quality of ranked search results over a set of +typical search queries. 
+
+
+[[search-rank-eval-api-request]]
+==== {api-request-title}
+
+`GET /<index>/_rank_eval`
+
+`POST /<index>/_rank_eval`
+
+
+[[search-rank-eval-api-desc]]
+==== {api-description-title}
+
+The ranking evaluation API allows you to evaluate the quality of ranked search
+results over a set of typical search queries. Given this set of queries and a
+list of manually rated documents, the `_rank_eval` endpoint calculates and
+returns typical information retrieval metrics like _mean reciprocal rank_,
+_precision_ or _discounted cumulative gain_.
 
-[float]
-==== Overview
+Search quality evaluation starts with looking at the users of your search
+application, and the things that they are searching for. Users have a specific
+_information need_, for example they are looking for a gift in a web shop or want
+to book a flight for their next holiday. They usually enter some search terms
+into a search box or some other web form. All of this information, together with
+meta information about the user (for example the browser, location, earlier
+preferences and so on) then gets translated into a query to the underlying
+search system.
 
-Search quality evaluation starts with looking at the users of your search application, and the things that they are searching for.
-Users have a specific _information need_, e.g. they are looking for gift in a web shop or want to book a flight for their next holiday.
-They usually enter some search terms into a search box or some other web form.
-All of this information, together with meta information about the user (e.g. the browser, location, earlier preferences etc...) then gets translated into a query to the underlying search system.
+The challenge for search engineers is to tweak this translation process from
+user entries to a concrete query in such a way that the search results contain
+the most relevant information with respect to the user's information need. This
+can only be done if the search result quality is evaluated constantly across a
+representative test suite of typical user queries, so that improvements in the
+rankings for one particular query don't negatively affect the ranking for
+other types of queries.
 
-The challenge for search engineers is to tweak this translation process from user entries to a concrete query in such a way, that the search results contain the most relevant information with respect to the users information need.
-This can only be done if the search result quality is evaluated constantly across a representative test suite of typical user queries, so that improvements in the rankings for one particular query doesn't negatively effect the ranking for other types of queries.
+In order to get started with search quality evaluation, three basic things are
+needed:
 
-In order to get started with search quality evaluation, three basic things are needed:
+. A collection of documents you want to evaluate your query performance against,
+  usually one or more indices.
+. A collection of typical search requests that users enter into your system.
+. A set of document ratings that judge the documents' relevance with respect to a
+  search request.
+
+It is important to note that one set of document ratings is needed per test
+query, and that the relevance judgements are based on the information need of
+the user that entered the query.
 
-. a collection of documents you want to evaluate your query performance against, usually one or more indices
-. a collection of typical search requests that users enter into your system
-.
a set of document ratings that judge the documents relevance with respect to a search request+ - It is important to note that one set of document ratings is needed per test query, and that - the relevance judgements are based on the information need of the user that entered the query. +The ranking evaluation API provides a convenient way to use this information in +a ranking evaluation request to calculate different search evaluation metrics. +This gives a first estimation of your overall search quality and give you a +measurement to optimize against when fine-tuning various aspect of the query +generation in your application. -The ranking evaluation API provides a convenient way to use this information in a ranking evaluation request to calculate different search evaluation metrics. This gives a first estimation of your overall search quality and give you a measurement to optimize against when fine-tuning various aspect of the query generation in your application. -[float] -==== Ranking evaluation request structure +[[search-rank-eval-api-path-params]] +==== {api-path-parms-title} + +``:: + (Required, string) Comma-separated list or wildcard expression of index names + used to limit the request. + +[[search-rank-eval-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] ++ +-- +Defaults to `open`. +-- + +include::{docdir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] + + +[[search-rank-eval-api-example]] +==== {api-examples-title} In its most basic form, a request to the `_rank_eval` endpoint has two sections: @@ -51,10 +103,13 @@ GET /my_index/_rank_eval <2> definition of the evaluation metric to calculate <3> a specific metric and its parameters -The request section contains several search requests typical to your application, along with the document ratings for each particular search request, e.g. +The request section contains several search requests typical to your +application, along with the document ratings for each particular search request. [source,js] ----------------------------- +GET /my_index/_rank_eval +{ "requests": [ { "id": "amsterdam_query", <1> @@ -77,20 +132,30 @@ The request section contains several search requests typical to your application ] } ] + } ----------------------------- // NOTCONSOLE <1> the search requests id, used to group result details later <2> the query that is being evaluated -<3> a list of document ratings, each entry containing the documents `_index` and `_id` together with -the rating of the documents relevance with regards to this search request +<3> a list of document ratings, each entry containing the documents `_index` and +`_id` together with the rating of the documents relevance with regards to this +search request -A document `rating` can be any integer value that expresses the relevance of the document on a user defined scale. For some of the metrics, just giving a binary rating (e.g. `0` for irrelevant and `1` for relevant) will be sufficient, other metrics can use a more fine grained scale. +A document `rating` can be any integer value that expresses the relevance of the +document on a user defined scale. For some of the metrics, just giving a binary +rating (for example `0` for irrelevant and `1` for relevant) will be sufficient, +other metrics can use a more fine grained scale. 
-[float] -==== Template based ranking evaluation -As an alternative to having to provide a single query per test request, it is possible to specify query templates in the evaluation request and later refer to them. Queries with similar structure that only differ in their parameters don't have to be repeated all the time in the `requests` section this way. In typical search systems where user inputs usually get filled into a small set of query templates, this helps making the evaluation request more succinct. +===== Template based ranking evaluation + +As an alternative to having to provide a single query per test request, it is +possible to specify query templates in the evaluation request and later refer to +them. Queries with similar structure that only differ in their parameters don't +have to be repeated all the time in the `requests` section this way. In typical +search systems where user inputs usually get filled into a small set of query +templates, this helps making the evaluation request more succinct. [source,js] -------------------------------- @@ -129,23 +194,30 @@ GET /my_index/_rank_eval <3> a reference to a previously defined template <4> the parameters to use to fill the template -[float] -==== Available evaluation metrics -The `metric` section determines which of the available evaluation metrics is going to be used. -Currently, the following metrics are supported: +===== Available evaluation metrics + +The `metric` section determines which of the available evaluation metrics is +going to be used. The following metrics are supported: [float] [[k-precision]] ===== Precision at K (P@k) -This metric measures the number of relevant results in the top k search results. Its a form of the well known https://en.wikipedia.org/wiki/Information_retrieval#Precision[Precision] metric that only looks at the top k documents. It is the fraction of relevant documents in those first k -search. A precision at 10 (P@10) value of 0.6 then means six out of the 10 top hits are relevant with respect to the users information need. +This metric measures the number of relevant results in the top k search results. +Its a form of the well known +https://en.wikipedia.org/wiki/Information_retrieval#Precision[Precision] metric +that only looks at the top k documents. It is the fraction of relevant documents +in those first k search. A precision at 10 (P@10) value of 0.6 then means six +out of the 10 top hits are relevant with respect to the users information need. -P@k works well as a simple evaluation metric that has the benefit of being easy to understand and explain. -Documents in the collection need to be rated either as relevant or irrelevant with respect to the current query. -P@k does not take into account where in the top k results the relevant documents occur, so a ranking of ten results that -contains one relevant result in position 10 is equally good as a ranking of ten results that contains one relevant result in position 1. +P@k works well as a simple evaluation metric that has the benefit of being easy +to understand and explain. Documents in the collection need to be rated either +as relevant or irrelevant with respect to the current query. P@k does not take +into account where in the top k results the relevant documents occur, so a +ranking of ten results that contains one relevant result in position 10 is +equally good as a ranking of ten results that contains one relevant result in +position 1. [source,console] -------------------------------- @@ -181,13 +253,15 @@ in the query. 
Defaults to 10. If set to 'true', unlabeled documents are ignored and neither count as relevant or irrelevant. Set to 'false' (the default), they are treated as irrelevant. |======================================================================= + [float] ===== Mean reciprocal rank -For every query in the test suite, this metric calculates the reciprocal of the rank of the -first relevant document. For example finding the first relevant result -in position 3 means the reciprocal rank is 1/3. The reciprocal rank for each query -is averaged across all queries in the test suite to give the https://en.wikipedia.org/wiki/Mean_reciprocal_rank[mean reciprocal rank]. +For every query in the test suite, this metric calculates the reciprocal of the +rank of the first relevant document. For example finding the first relevant +result in position 3 means the reciprocal rank is 1/3. The reciprocal rank for +each query is averaged across all queries in the test suite to give the +https://en.wikipedia.org/wiki/Mean_reciprocal_rank[mean reciprocal rank]. [source,console] -------------------------------- @@ -220,12 +294,18 @@ in the query. Defaults to 10. "relevant". Defaults to `1`. |======================================================================= + [float] ===== Discounted cumulative gain (DCG) -In contrast to the two metrics above, https://en.wikipedia.org/wiki/Discounted_cumulative_gain[discounted cumulative gain] takes both, the rank and the rating of the search results, into account. +In contrast to the two metrics above, +https://en.wikipedia.org/wiki/Discounted_cumulative_gain[discounted cumulative gain] +takes both, the rank and the rating of the search results, into account. -The assumption is that highly relevant documents are more useful for the user when appearing at the top of the result list. Therefore, the DCG formula reduces the contribution that high ratings for documents on lower search ranks have on the overall DCG metric. +The assumption is that highly relevant documents are more useful for the user +when appearing at the top of the result list. Therefore, the DCG formula reduces +the contribution that high ratings for documents on lower search ranks have on +the overall DCG metric. [source,console] -------------------------------- @@ -257,23 +337,31 @@ in the query. Defaults to 10. |`normalize` | If set to `true`, this metric will calculate the https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG[Normalized DCG]. |======================================================================= + [float] ===== Expected Reciprocal Rank (ERR) -Expected Reciprocal Rank (ERR) is an extension of the classical reciprocal rank for the graded relevance case -(Olivier Chapelle, Donald Metzler, Ya Zhang, and Pierre Grinspan. 2009. http://olivier.chapelle.cc/pub/err.pdf[Expected reciprocal rank for graded relevance].) +Expected Reciprocal Rank (ERR) is an extension of the classical reciprocal rank +for the graded relevance case (Olivier Chapelle, Donald Metzler, Ya Zhang, and +Pierre Grinspan. 2009. +http://olivier.chapelle.cc/pub/err.pdf[Expected reciprocal rank for graded relevance].) -It is based on the assumption of a cascade model of search, in which a user scans through ranked search -results in order and stops at the first document that satisfies the information need. 
For this reason, it -is a good metric for question answering and navigation queries, but less so for survey oriented information -needs where the user is interested in finding many relevant documents in the top k results. +It is based on the assumption of a cascade model of search, in which a user +scans through ranked search results in order and stops at the first document +that satisfies the information need. For this reason, it is a good metric for +question answering and navigation queries, but less so for survey oriented +information needs where the user is interested in finding many relevant +documents in the top k results. -The metric models the expectation of the reciprocal of the position at which a user stops reading through -the result list. This means that relevant document in top ranking positions will contribute much to the -overall score. However, the same document will contribute much less to the score if it appears in a lower rank, -even more so if there are some relevant (but maybe less relevant) documents preceding it. -In this way, the ERR metric discounts documents which are shown after very relevant documents. This introduces -a notion of dependency in the ordering of relevant documents that e.g. Precision or DCG don't account for. +The metric models the expectation of the reciprocal of the position at which a +user stops reading through the result list. This means that relevant document in +top ranking positions will contribute much to the overall score. However, the +same document will contribute much less to the score if it appears in a lower +rank, even more so if there are some relevant (but maybe less relevant) +documents preceding it. In this way, the ERR metric discounts documents which +are shown after very relevant documents. This introduces a notion of dependency +in the ordering of relevant documents that e.g. Precision or DCG don't account +for. [source,console] -------------------------------- @@ -306,12 +394,13 @@ relevance judgments. in the query. Defaults to 10. |======================================================================= -[float] -==== Response format -The response of the `_rank_eval` endpoint contains the overall calculated result for the defined quality metric, -a `details` section with a breakdown of results for each query in the test suite and an optional `failures` section -that shows potential errors of individual queries. The response has the following format: +===== Response format + +The response of the `_rank_eval` endpoint contains the overall calculated result +for the defined quality metric, a `details` section with a breakdown of results +for each query in the test suite and an optional `failures` section that shows +potential errors of individual queries. The response has the following format: [source,js] -------------------------------- From 58d2bf7e09f1e43d5aebc46958ed0ade2a40d2cb Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Wed, 25 Sep 2019 16:05:42 +0300 Subject: [PATCH 57/94] Re-enable BWC tests (#47102) Re-enable BWC tests now that #46534 has been backported to 7.x --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index f698fa052af..d2b31e0e662 100644 --- a/build.gradle +++ b/build.gradle @@ -179,8 +179,8 @@ task verifyVersions { * after the backport of the backcompat code is complete. 
*/
-boolean bwc_tests_enabled = false
-final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/46534" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = true
+final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")

From 93fcd23da8aca351a03317d52f1d37b5deb3d941 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 25 Sep 2019 15:55:33 +0200
Subject: [PATCH 58/94] Fail Snapshot on Corrupted Metadata Blob (#47009) (#47096)

We should not be quietly ignoring a corrupted shard-level index-N blob. Simply creating a new empty shard-level index-N and moving on means that all snapshots of that shard show `SUCCESS` as their state at the repository root but are in fact broken. This change at least makes it visible to the user that they can't snapshot the given shard any more and forces the user to move on to a new repository since the current one is broken and will not allow snapshotting the inconsistent shard again.

Also, this change stops the delete action for shards with broken index-N blobs instead of simply deleting all blobs in the path containing the broken index-N. This prevents a temporarily broken/missing index-N blob from corrupting all snapshots of that shard.

---
.../blobstore/BlobStoreRepository.java | 30 ++++++++-----------
.../SharedClusterSnapshotRestoreIT.java | 19 ++----------
2 files changed, 16 insertions(+), 33 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index b2db14a5c29..47df18146d1 100644
--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -988,7 +988,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e);
}

- Tuple tuple = buildBlobStoreIndexShardSnapshots(blobs, shardContainer);
+ Tuple tuple = buildBlobStoreIndexShardSnapshots(blobs.keySet(), shardContainer);
BlobStoreIndexShardSnapshots snapshots = tuple.v1();
long fileListGeneration = tuple.v2();

@@ -1233,7 +1233,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
/**
* Delete shard snapshot
*/
- private void deleteShardSnapshot(RepositoryData repositoryData, IndexId indexId, ShardId snapshotShardId, SnapshotId snapshotId) {
+ private void deleteShardSnapshot(RepositoryData repositoryData, IndexId indexId, ShardId snapshotShardId, SnapshotId snapshotId)
+ throws IOException {
final BlobContainer shardContainer = shardContainer(indexId, snapshotShardId);
final Map blobs;
try {
@@ -1242,7 +1243,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
throw new IndexShardSnapshotException(snapshotShardId, "Failed to list content of shard directory", e);
}

- Tuple tuple = buildBlobStoreIndexShardSnapshots(blobs, shardContainer);
+ Tuple tuple = buildBlobStoreIndexShardSnapshots(blobs.keySet(), shardContainer);
BlobStoreIndexShardSnapshots snapshots = tuple.v1();
long fileListGeneration = tuple.v2();

@@ -1313,21 +1314,16 @@ public abstract class BlobStoreRepository extends
AbstractLifecycleComponent imp * @param blobs list of blobs in repository * @return tuple of BlobStoreIndexShardSnapshots and the last snapshot index generation */ - private Tuple buildBlobStoreIndexShardSnapshots(Map blobs, - BlobContainer shardContainer) { - Set blobKeys = blobs.keySet(); - long latest = latestGeneration(blobKeys); + private Tuple buildBlobStoreIndexShardSnapshots(Set blobs, BlobContainer shardContainer) + throws IOException { + long latest = latestGeneration(blobs); if (latest >= 0) { - try { - final BlobStoreIndexShardSnapshots shardSnapshots = - indexShardSnapshotsFormat.read(shardContainer, Long.toString(latest)); - return new Tuple<>(shardSnapshots, latest); - } catch (IOException e) { - final String file = SNAPSHOT_INDEX_PREFIX + latest; - logger.warn(() -> new ParameterizedMessage("failed to read index file [{}]", file), e); - } - } else if (blobKeys.isEmpty() == false) { - logger.warn("Could not find a readable index-N file in a non-empty shard snapshot directory [{}]", shardContainer.path()); + final BlobStoreIndexShardSnapshots shardSnapshots = indexShardSnapshotsFormat.read(shardContainer, Long.toString(latest)); + return new Tuple<>(shardSnapshots, latest); + } else if (blobs.stream().anyMatch(b -> b.startsWith(SNAPSHOT_PREFIX) || b.startsWith(INDEX_FILE_PREFIX) + || b.startsWith(DATA_BLOB_PREFIX))) { + throw new IllegalStateException( + "Could not find a readable index-N file in a non-empty shard snapshot directory [" + shardContainer.path() + "]"); } return new Tuple<>(BlobStoreIndexShardSnapshots.EMPTY, latest); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 21136f6a97d..be3f24f41c4 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -2926,23 +2926,10 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setWaitForCompletion(true) .get() .getSnapshotInfo(); - assertThat(snapshotInfo2.state(), equalTo(SnapshotState.SUCCESS)); - assertThat(snapshotInfo2.failedShards(), equalTo(0)); - assertThat(snapshotInfo2.successfulShards(), equalTo(snapshotInfo.totalShards())); + assertThat(snapshotInfo2.state(), equalTo(SnapshotState.PARTIAL)); + assertThat(snapshotInfo2.failedShards(), equalTo(1)); + assertThat(snapshotInfo2.successfulShards(), equalTo(snapshotInfo.totalShards() - 1)); assertThat(snapshotInfo2.indices(), hasSize(1)); - - logger.info("--> deleting index [{}]", indexName); - assertAcked(client().admin().indices().prepareDelete(indexName)); - - logger.info("--> restoring snapshot [{}]", snapshot2); - client().admin().cluster().prepareRestoreSnapshot("test-repo", snapshot2) - .setRestoreGlobalState(randomBoolean()) - .setWaitForCompletion(true) - .get(); - - ensureGreen(); - - assertHitCount(client().prepareSearch(indexName).setSize(0).get(), 2 * nDocs); } public void testCannotCreateSnapshotsWithSameName() throws Exception { From d5f396fe5571c34c11f625a2be1e471125a155d9 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 25 Sep 2019 08:11:37 -0700 Subject: [PATCH 59/94] [DOCS] Adds transforms to Elasticsearch book (#46846) (#47055) --- docs/reference/data-rollup-transform.asciidoc | 6 +- .../reference/transform/api-quickref.asciidoc | 4 +- docs/reference/transform/apis/index.asciidoc | 2 - .../transform/apis/put-transform.asciidoc | 3 +- 
docs/reference/transform/checkpoints.asciidoc | 4 +- ...e.asciidoc => ecommerce-tutorial.asciidoc} | 10 ++-- ...me-examples.asciidoc => examples.asciidoc} | 15 ++--- docs/reference/transform/index.asciidoc | 32 +++++----- docs/reference/transform/limitations.asciidoc | 60 +++++++++---------- docs/reference/transform/overview.asciidoc | 4 +- .../transform/troubleshooting.asciidoc | 10 +++- docs/reference/transform/usage.asciidoc | 4 +- 12 files changed, 79 insertions(+), 75 deletions(-) rename docs/reference/transform/{ecommerce-example.asciidoc => ecommerce-tutorial.asciidoc} (96%) rename docs/reference/transform/{dataframe-examples.asciidoc => examples.asciidoc} (97%) diff --git a/docs/reference/data-rollup-transform.asciidoc b/docs/reference/data-rollup-transform.asciidoc index 5fe08d6f0d5..413b7d89d82 100644 --- a/docs/reference/data-rollup-transform.asciidoc +++ b/docs/reference/data-rollup-transform.asciidoc @@ -9,8 +9,12 @@ * <> + include::rollup/index.asciidoc[tag=rollup-intro] -* {stack-ov}/ml-dataframes.html[Transforming your data] +* <> ++ +include::transform/index.asciidoc[tag=transform-intro] -- include::rollup/index.asciidoc[] + +include::transform/index.asciidoc[] diff --git a/docs/reference/transform/api-quickref.asciidoc b/docs/reference/transform/api-quickref.asciidoc index 9d2590a1540..d2dff5c3021 100644 --- a/docs/reference/transform/api-quickref.asciidoc +++ b/docs/reference/transform/api-quickref.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[df-api-quickref]] -== API quick reference +[[transform-api-quickref]] +=== API quick reference All {transform} endpoints have the following base: diff --git a/docs/reference/transform/apis/index.asciidoc b/docs/reference/transform/apis/index.asciidoc index e496401d340..80384af9598 100644 --- a/docs/reference/transform/apis/index.asciidoc +++ b/docs/reference/transform/apis/index.asciidoc @@ -3,8 +3,6 @@ [[transform-apis]] == {transform-cap} APIs -See also {stack-ov}/ml-dataframes.html[{transforms-cap}]. - * <> * <> * <> diff --git a/docs/reference/transform/apis/put-transform.asciidoc b/docs/reference/transform/apis/put-transform.asciidoc index 49c3d7981dd..d72f2722f4e 100644 --- a/docs/reference/transform/apis/put-transform.asciidoc +++ b/docs/reference/transform/apis/put-transform.asciidoc @@ -37,8 +37,7 @@ entities are defined by the set of `group_by` fields in the `pivot` object. You can also think of the destination index as a two-dimensional tabular data structure (known as a {dataframe}). The ID for each document in the {dataframe} is generated from a hash of the entity, so there is a unique row -per entity. For more information, see -{stack-ov}/ml-dataframes.html[{transforms-cap}]. +per entity. For more information, see <>. When the {transform} is created, a series of validations occur to ensure its success. 
For example, there is a check for the existence of the diff --git a/docs/reference/transform/checkpoints.asciidoc b/docs/reference/transform/checkpoints.asciidoc index 4c41b876b23..379834e8553 100644 --- a/docs/reference/transform/checkpoints.asciidoc +++ b/docs/reference/transform/checkpoints.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[ml-transform-checkpoints]] -== How {transform} checkpoints work +[[transform-checkpoints]] +=== How {transform} checkpoints work ++++ How checkpoints work ++++ diff --git a/docs/reference/transform/ecommerce-example.asciidoc b/docs/reference/transform/ecommerce-tutorial.asciidoc similarity index 96% rename from docs/reference/transform/ecommerce-example.asciidoc rename to docs/reference/transform/ecommerce-tutorial.asciidoc index 7f8267baa16..026127f97ba 100644 --- a/docs/reference/transform/ecommerce-example.asciidoc +++ b/docs/reference/transform/ecommerce-tutorial.asciidoc @@ -1,11 +1,11 @@ [role="xpack"] [testenv="basic"] -[[ecommerce-dataframes]] -=== Transforming the eCommerce sample data +[[ecommerce-transforms]] +=== Tutorial: Transforming the eCommerce sample data beta[] -<> enable you to retrieve information +<> enable you to retrieve information from an {es} index, transform it, and store it in another index. Let's use the {kibana-ref}/add-sample-data.html[{kib} sample data] to demonstrate how you can pivot and summarize your data with {transforms}. @@ -23,7 +23,9 @@ You also need `read` and `view_index_metadata` index privileges on the source index and `read`, `create_index`, and `index` privileges on the destination index. -For more information, see <> and <>. +For more information, see +{stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. -- . Choose your _source index_. diff --git a/docs/reference/transform/dataframe-examples.asciidoc b/docs/reference/transform/examples.asciidoc similarity index 97% rename from docs/reference/transform/dataframe-examples.asciidoc rename to docs/reference/transform/examples.asciidoc index 6c03ad3ecb3..6b15b0517e7 100644 --- a/docs/reference/transform/dataframe-examples.asciidoc +++ b/docs/reference/transform/examples.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[dataframe-examples]] -== {transform-cap} examples +[[transform-examples]] +=== {transform-cap} examples ++++ Examples ++++ @@ -12,17 +12,14 @@ These examples demonstrate how to use {transforms} to derive useful insights from your data. All the examples use one of the {kibana-ref}/add-sample-data.html[{kib} sample datasets]. For a more detailed, step-by-step example, see -<>. +<>. -* <> * <> * <> * <> -include::ecommerce-example.asciidoc[] - [[example-best-customers]] -=== Finding your best customers +==== Finding your best customers In this example, we use the eCommerce orders sample dataset to find the customers who spent the most in our hypothetical webshop. Let's transform the data such @@ -106,7 +103,7 @@ navigate data from a customer centric perspective. In some cases, it can even make creating visualizations much simpler. [[example-airline]] -=== Finding air carriers with the most delays +==== Finding air carriers with the most delays In this example, we use the Flights sample dataset to find out which air carrier had the most delays. First, we filter the source data such that it excludes all @@ -193,7 +190,7 @@ or flight stats for any of the featured destination or origin airports. 
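To sanity-check a pivot like the ones above before creating anything, the `_preview` endpoint returns a sample of the transformed documents without writing an index. Below is a minimal sketch using the low-level Java REST client; the local host and port, the `kibana_sample_data_flights` index name, and the `Carrier` and `FlightDelayMin` fields are assumptions based on the {kib} sample data rather than values taken from this patch.

[source,java]
--------------------------------
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class PreviewTransformSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a local cluster with the flights sample data loaded
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request preview = new Request("POST", "/_data_frame/transforms/_preview");
            // Pivot: average delay per carrier, grouped on the Carrier field
            preview.setJsonEntity(
                "{" +
                "  \"source\": { \"index\": \"kibana_sample_data_flights\" }," +
                "  \"pivot\": {" +
                "    \"group_by\": { \"carrier\": { \"terms\": { \"field\": \"Carrier\" } } }," +
                "    \"aggregations\": { \"avg_delay\": { \"avg\": { \"field\": \"FlightDelayMin\" } } }" +
                "  }" +
                "}");
            Response response = client.performRequest(preview);
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
--------------------------------

Previewing first is cheap insurance: it surfaces mapping deductions and aggregation incompatibilities before any destination index exists.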
[[example-clientips]] -=== Finding suspicious client IPs by using scripted metrics +==== Finding suspicious client IPs by using scripted metrics With {transforms}, you can use {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[scripted diff --git a/docs/reference/transform/index.asciidoc b/docs/reference/transform/index.asciidoc index 41ffd97ee39..25a7f3dd407 100644 --- a/docs/reference/transform/index.asciidoc +++ b/docs/reference/transform/index.asciidoc @@ -1,27 +1,27 @@ [role="xpack"] -[[ml-dataframes]] -= Transforming data - -[partintro] --- +[[transforms]] +== Transforming data +// tag::transform-intro[] {transforms-cap} enable you to convert existing {es} indices into summarized -indices, which provide opportunities for new insights and analytics. For example, -you can use {transforms} to pivot your data into entity-centric indices that -summarize the behavior of users or sessions or other entities in your data. +indices, which provide opportunities for new insights and analytics. +// end::transform-intro[] +For example, you can use {transforms} to pivot your data into entity-centric +indices that summarize the behavior of users or sessions or other entities in +your data. -* <> -* <> -* <> -* <> -* <> -* <> --- +* <> +* <> +* <> +* <> +* <> +* <> include::overview.asciidoc[] include::usage.asciidoc[] include::checkpoints.asciidoc[] include::api-quickref.asciidoc[] -include::dataframe-examples.asciidoc[] +include::ecommerce-tutorial.asciidoc[] +include::examples.asciidoc[] include::troubleshooting.asciidoc[] include::limitations.asciidoc[] \ No newline at end of file diff --git a/docs/reference/transform/limitations.asciidoc b/docs/reference/transform/limitations.asciidoc index a97737464b3..70a3ffb2543 100644 --- a/docs/reference/transform/limitations.asciidoc +++ b/docs/reference/transform/limitations.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[dataframe-limitations]] -== {transform-cap} limitations +[[transform-limitations]] +=== {transform-cap} limitations [subs="attributes"] ++++ Limitations @@ -12,8 +12,8 @@ The following limitations and known problems apply to the 7.4 release of the Elastic {dataframe} feature: [float] -[[df-compatibility-limitations]] -=== Beta {transforms} do not have guaranteed backwards or forwards compatibility +[[transform-compatibility-limitations]] +==== Beta {transforms} do not have guaranteed backwards or forwards compatibility Whilst {transforms} are beta, it is not guaranteed that a {transform} created in a previous version of the {stack} will be able @@ -25,8 +25,8 @@ destination index. This is a normal {es} index and is not affected by the beta status. [float] -[[df-ui-limitation]] -=== {dataframe-cap} UI will not work during a rolling upgrade from 7.2 +[[transform-ui-limitation]] +==== {dataframe-cap} UI will not work during a rolling upgrade from 7.2 If your cluster contains mixed version nodes, for example during a rolling upgrade from 7.2 to a newer version, and {transforms} have been @@ -35,22 +35,22 @@ have been upgraded to the newer version before using the {dataframe} UI. [float] -[[df-datatype-limitations]] -=== {dataframe-cap} data type limitation +[[transform-datatype-limitations]] +==== {dataframe-cap} data type limitation {dataframes-cap} do not (yet) support fields containing arrays – in the UI or the API. If you try to create one, the UI will fail to show the source index table. 
[float] -[[df-ccs-limitations]] -=== {ccs-cap} is not supported +[[transform-ccs-limitations]] +==== {ccs-cap} is not supported {ccs-cap} is not supported for {transforms}. [float] -[[df-kibana-limitations]] -=== Up to 1,000 {transforms} are supported +[[transform-kibana-limitations]] +==== Up to 1,000 {transforms} are supported A single cluster will support up to 1,000 {transforms}. When using the @@ -59,8 +59,8 @@ When using the enumerate through the full list. [float] -[[df-aggresponse-limitations]] -=== Aggregation responses may be incompatible with destination index mappings +[[transform-aggresponse-limitations]] +==== Aggregation responses may be incompatible with destination index mappings When a {transform} is first started, it will deduce the mappings required for the destination index. This process is based on the field types of @@ -77,8 +77,8 @@ workaround, you may define custom mappings prior to starting the {ref}/indices-templates.html[define an index template]. [float] -[[df-batch-limitations]] -=== Batch {transforms} may not account for changed documents +[[transform-batch-limitations]] +==== Batch {transforms} may not account for changed documents A batch {transform} uses a {ref}/search-aggregations-bucket-composite-aggregation.html[composite aggregation] @@ -88,8 +88,8 @@ do not yet support a search context, therefore if the source data is changed results may not include these changes. [float] -[[df-consistency-limitations]] -=== {cdataframe-cap} consistency does not account for deleted or updated documents +[[transform-consistency-limitations]] +==== {cdataframe-cap} consistency does not account for deleted or updated documents While the process for {transforms} allows the continual recalculation of the {transform} as new data is being ingested, it does also have @@ -114,16 +114,16 @@ updated when viewing the {dataframe} destination index. [float] -[[df-deletion-limitations]] -=== Deleting a {transform} does not delete the {dataframe} destination index or {kib} index pattern +[[transform-deletion-limitations]] +==== Deleting a {transform} does not delete the {dataframe} destination index or {kib} index pattern When deleting a {transform} using `DELETE _data_frame/transforms/index` neither the {dataframe} destination index nor the {kib} index pattern, should one have been created, are deleted. These objects must be deleted separately. [float] -[[df-aggregation-page-limitations]] -=== Handling dynamic adjustment of aggregation page size +[[transform-aggregation-page-limitations]] +==== Handling dynamic adjustment of aggregation page size During the development of {transforms}, control was favoured over performance. In the design considerations, it is preferred for the @@ -153,8 +153,8 @@ requested has been reduced to its minimum, then the {transform} will be set to a failed state. [float] -[[df-dynamic-adjustments-limitations]] -=== Handling dynamic adjustments for many terms +[[transform-dynamic-adjustments-limitations]] +==== Handling dynamic adjustments for many terms For each checkpoint, entities are identified that have changed since the last time the check was performed. This list of changed entities is supplied as a @@ -176,8 +176,8 @@ Using smaller values for `max_page_search_size` may result in a longer duration for the {transform} checkpoint to complete. 
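To make the trade-off concrete, here is a hedged sketch of creating a transform with an explicit `max_page_search_size`, again using the low-level Java REST client; the transform id, the index names, and the field names are placeholders rather than values from this repository.

[source,java]
--------------------------------
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class TransformPageSizeSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/_data_frame/transforms/example-transform");
            // A smaller page size lowers per-search memory pressure, at the cost
            // of longer checkpoints, as described in the limitation above.
            request.setJsonEntity(
                "{" +
                "  \"source\": { \"index\": \"example-source\" }," +
                "  \"dest\": { \"index\": \"example-dest\" }," +
                "  \"pivot\": {" +
                "    \"group_by\": { \"user\": { \"terms\": { \"field\": \"user.keyword\" } } }," +
                "    \"aggregations\": { \"total\": { \"sum\": { \"field\": \"amount\" } } }," +
                "    \"max_page_search_size\": 200" +
                "  }" +
                "}");
            System.out.println(client.performRequest(request).getStatusLine());
        }
    }
}
--------------------------------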
[float] -[[df-scheduling-limitations]] -=== {cdataframe-cap} scheduling limitations +[[transform-scheduling-limitations]] +==== {cdataframe-cap} scheduling limitations A {cdataframe} periodically checks for changes to source data. The functionality of the scheduler is currently limited to a basic periodic timer which can be @@ -188,8 +188,8 @@ search/index operations has other users in your cluster. Also note that retries occur at `frequency` interval. [float] -[[df-failed-limitations]] -=== Handling of failed {transforms} +[[transform-failed-limitations]] +==== Handling of failed {transforms} Failed {transforms} remain as a persistent task and should be handled appropriately, either by deleting it or by resolving the root cause of the @@ -199,8 +199,8 @@ When using the API to delete a failed {transform}, first stop it using `_stop?force=true`, then delete it. [float] -[[df-availability-limitations]] -=== {cdataframes-cap} may give incorrect results if documents are not yet available to search +[[transform-availability-limitations]] +==== {cdataframes-cap} may give incorrect results if documents are not yet available to search After a document is indexed, there is a very small delay until it is available to search. diff --git a/docs/reference/transform/overview.asciidoc b/docs/reference/transform/overview.asciidoc index fa161f2e9ea..e3c852d8be9 100644 --- a/docs/reference/transform/overview.asciidoc +++ b/docs/reference/transform/overview.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[ml-transform-overview]] -== {transform-cap} overview +[[transform-overview]] +=== {transform-cap} overview ++++ Overview ++++ diff --git a/docs/reference/transform/troubleshooting.asciidoc b/docs/reference/transform/troubleshooting.asciidoc index 9d76e93415d..b453529a65f 100644 --- a/docs/reference/transform/troubleshooting.asciidoc +++ b/docs/reference/transform/troubleshooting.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[dataframe-troubleshooting]] -== Troubleshooting {transforms} +[[transform-troubleshooting]] +=== Troubleshooting {transforms} [subs="attributes"] ++++ Troubleshooting @@ -9,7 +9,11 @@ Use the information in this section to troubleshoot common problems. -include::{stack-repo-dir}/help.asciidoc[tag=get-help] +For issues that you cannot fix yourself … we’re here to help. +If you are an existing Elastic customer with a support contract, please create +a ticket in the +https://support.elastic.co/customers/s/login/[Elastic Support portal]. +Or post in the https://discuss.elastic.co/[Elastic forum]. If you encounter problems with your {transforms}, you can gather more information from the following files and APIs: diff --git a/docs/reference/transform/usage.asciidoc b/docs/reference/transform/usage.asciidoc index 70dfe0f80b3..f78a0388bcb 100644 --- a/docs/reference/transform/usage.asciidoc +++ b/docs/reference/transform/usage.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[ml-transforms-usage]] -== When to use {transforms} +[[transform-usage]] +=== When to use {transforms} {es} aggregations are a powerful and flexible feature that enable you to summarize and retrieve complex insights about your data. You can summarize From 365aa30b7f7a271ab7e0259faf334a8e3e3dfb8e Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 25 Sep 2019 12:35:48 -0400 Subject: [PATCH 60/94] [DOCS] Remove support for `// CONSOLE` magic comment (#46936) (#47110) #46180 added support for the `[source,console]` language for snippets which should be tested. 
This removes support for the `// CONSOLE` magic comment, which serves a similar purpose. Snippets that still include the `// CONSOLE` magic comment will now trigger an exception.

---
.../gradle/doc/RestTestsFromSnippetsTask.groovy | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
index 54db2cd7162..b42bc83b472 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
@@ -206,8 +206,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
response(snippet)
return
}
- if (snippet.test || snippet.console ||
- snippet.language == 'console') {
+ if ((snippet.language == 'js') && (snippet.console)) {
+ throw new InvalidUserDataException(
+ "$snippet: Use `[source,console]` instead of `// CONSOLE`.")
+ }
+ if (snippet.test || snippet.language == 'console') {
test(snippet)
previousTest = snippet
return

From 9ddc99de230809ea0e3c1855a8c1059405d01d98 Mon Sep 17 00:00:00 2001
From: James Rodewig
Date: Wed, 25 Sep 2019 12:36:23 -0400
Subject: [PATCH 61/94] [DOCS] Reformat clone index API docs (#46762)

---
docs/reference/indices/clone-index.asciidoc | 144 +++++++++++-------
docs/reference/rest-api/common-parms.asciidoc | 31 +++-
2 files changed, 120 insertions(+), 55 deletions(-)

diff --git a/docs/reference/indices/clone-index.asciidoc b/docs/reference/indices/clone-index.asciidoc
index d7eb2d20913..e9c9dd2796a 100644
--- a/docs/reference/indices/clone-index.asciidoc
+++ b/docs/reference/indices/clone-index.asciidoc
@@ -1,12 +1,60 @@
[[indices-clone-index]]
-=== Clone Index
+=== Clone index API
+++++
+Clone index
+++++

-The clone index API allows you to clone an existing index into a new index,
-where each original primary shard is cloned into a new primary shard in
-the new index.
+Clones an existing index.

-[float]
-==== How does cloning work?
+[source,console]
+--------------------------------------------------
+POST /twitter/_clone/cloned-twitter-index
+--------------------------------------------------
+// TEST[s/^/PUT twitter\n{"settings":{"index.number_of_shards" : 5,"blocks.write":true}}\n/]
+
+
+[[clone-index-api-request]]
+==== {api-request-title}
+
+`POST //_clone/`
+
+`PUT //_clone/`
+
+
+[[clone-index-api-prereqs]]
+==== {api-prereq-title}
+
+To clone an index,
+the index must be marked as read-only
+and have a <> status of `green`.
+
+For example,
+the following request prevents write operations on `my_source_index`
+so it can be cloned.
+Metadata changes like deleting the index are still allowed.
+
+[source,console]
+--------------------------------------------------
+PUT /my_source_index/_settings
+{
+  "settings": {
+    "index.blocks.write": true
+  }
+}
+--------------------------------------------------
+// TEST[s/^/PUT my_source_index\n/]
+
+
+[[clone-index-api-desc]]
+==== {api-description-title}
+
+Use the clone index API
+to clone an existing index into a new index,
+where each original primary shard is cloned
+into a new primary shard in the new index.
+
+[[cloning-works]]
+===== How cloning works

Cloning works as follows:

* First, it creates a new target index with the same definition as the
source index.

* Then it hard-links segments from the source index into the target index. (If
the file system doesn't support hard-linking, then all segments are copied
into the new index, which is a much more time-consuming process.)

* Finally, it recovers the target index as though it were a closed index which
had just been re-opened.
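For scripted setups, the workflow on this page boils down to three plain REST calls: block writes, clone, then wait for the new primaries. The following is a minimal sketch with the low-level Java REST client, assuming a local cluster and the index names used on this page.

[source,java]
--------------------------------
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class CloneIndexSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Step 1: block writes on the source index so it can be cloned
            Request block = new Request("PUT", "/my_source_index/_settings");
            block.setJsonEntity("{ \"settings\": { \"index.blocks.write\": true } }");
            client.performRequest(block);

            // Step 2: clone the source index into a new target index
            client.performRequest(new Request("POST", "/my_source_index/_clone/my_target_index"));

            // Step 3: wait until the target's primary shards are allocated
            Request health = new Request("GET", "/_cluster/health/my_target_index");
            health.addParameter("wait_for_status", "yellow");
            client.performRequest(health);
        }
    }
}
--------------------------------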
-[float] -==== Preparing an index for cloning - -Create a new index: - -[source,console] --------------------------------------------------- -PUT my_source_index -{ - "settings": { - "index.number_of_shards" : 5 - } -} --------------------------------------------------- - -In order to clone an index, the index must be marked as read-only, -and have <> `green`. - -This can be achieved with the following request: - -[source,console] --------------------------------------------------- -PUT /my_source_index/_settings -{ - "settings": { - "index.blocks.write": true <1> - } -} --------------------------------------------------- -// TEST[continued] - -<1> Prevents write operations to this index while still allowing metadata - changes like deleting the index. - -[float] -==== Cloning an index +[[clone-index]] +===== Clone an index To clone `my_source_index` into a new index called `my_target_index`, issue the following request: [source,console] -------------------------------------------------- -POST my_source_index/_clone/my_target_index +POST /my_source_index/_clone/my_target_index -------------------------------------------------- // TEST[continued] @@ -72,9 +86,9 @@ the cluster state -- it doesn't wait for the clone operation to start. [IMPORTANT] ===================================== -Indices can only be cloned if they satisfy the following requirements: +Indices can only be cloned if they meet the following requirements: -* the target index must not exist +* The target index must not exist. * The source index must have the same number of primary shards as the target index. @@ -88,7 +102,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,console] -------------------------------------------------- -POST my_source_index/_clone/my_target_index +POST /my_source_index/_clone/my_target_index { "settings": { "index.number_of_shards": 5 <1> @@ -107,10 +121,10 @@ POST my_source_index/_clone/my_target_index NOTE: Mappings may not be specified in the `_clone` request. The mappings of the source index will be used for the target index. -[float] -==== Monitoring the clone process +[[monitor-cloning]] +===== Monitor the cloning process -The clone process can be monitored with the <>, or the <> can be used to wait until all primary shards have been allocated by setting the `wait_for_status` parameter to `yellow`. @@ -123,12 +137,38 @@ can be allocated on that node. Once the primary shard is allocated, it moves to state `initializing`, and the clone process begins. When the clone operation completes, the shard will -become `active`. At that point, Elasticsearch will try to allocate any +become `active`. At that point, {es} will try to allocate any replicas and may decide to relocate the primary shard to another node. -[float] -==== Wait For Active Shards +[[clone-wait-active-shards]] +===== Wait for active shards Because the clone operation creates a new index to clone the shards to, the <> setting on index creation applies to the clone index action as well. + + +[[clone-index-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of the source index to clone. 
+ +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index] + + +[[clone-index-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms] + + +[[clone-index-api-request-body]] +==== {api-request-body-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index-aliases] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index-settings] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 9a40236a1ce..4b76893b730 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -18,6 +18,13 @@ If specified, the index alias only applies to documents returned by the filter. end::index-alias-filter[] +tag::target-index-aliases[] +`aliases`:: +(Optional, <>) +Index aliases which include the target index. +See <>. +end::target-index-aliases[] + tag::allow-no-indices[] `allow_no_indices`:: (Optional, boolean) If `true`, @@ -489,15 +496,22 @@ the segment has most likely been written to disk but needs a <> to be searchable. end::segment-search[] +tag::segment-size[] +Disk space used by the segment, such as `50kb`. +end::segment-size[] + tag::settings[] `settings`:: (Optional, <>) Configuration options for the index. See <>. end::settings[] -tag::segment-size[] -Disk space used by the segment, such as `50kb`. -end::segment-size[] +tag::target-index-settings[] +`settings`:: +(Optional, <>) +Configuration options for the target index. +See <>. +end::target-index-settings[] tag::slices[] `slices`:: @@ -534,6 +548,17 @@ tag::stats[] purposes. end::stats[] +tag::target-index[] +``:: ++ +-- +(Required, string) +Name of the target index to create. 
+ +include::{docdir}/indices/create-index.asciidoc[tag=index-name-reqs] +-- +end::target-index[] + tag::terminate_after[] `terminate_after`:: (Optional, integer) The maximum number of documents to collect for each shard, From 35f056661f2357f10b8fa42e53f6c7c7d392b7be Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Wed, 25 Sep 2019 08:53:33 +0100 Subject: [PATCH 62/94] Updates 7.4.0 release notes --- docs/reference/release-notes/7.4.asciidoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/reference/release-notes/7.4.asciidoc b/docs/reference/release-notes/7.4.asciidoc index 1d8fd00950a..9714eb582fd 100644 --- a/docs/reference/release-notes/7.4.asciidoc +++ b/docs/reference/release-notes/7.4.asciidoc @@ -199,6 +199,9 @@ Features/Watcher:: Geo:: * Support WKT point conversion to geo_point type {pull}44107[#44107] (issue: {issue}41821[#41821]) +Infra/Circuit Breakers:: +* Fix G1 GC default IHOP {pull}46169[#46169] + Infra/Core:: * Add OCI annotations and adjust existing annotations {pull}45167[#45167] (issues: {issue}45162[#45162], {issue}45166[#45166]) * Use the full hash in build info {pull}45163[#45163] (issue: {issue}45162[#45162]) @@ -309,6 +312,7 @@ Authentication:: * Fix X509AuthenticationToken principal {pull}43932[#43932] (issues: {issue}34396[#34396], {issue}43796[#43796]) Authorization:: +* Do not rewrite aliases on remove-index from aliases requests {pull}46989[#46989] * Give kibana user privileges to create APM agent config index {pull}46765[#46765] (issue: {issue}45610[#45610]) * Add `manage_own_api_key` cluster privilege {pull}45696[#45696] (issue: {issue}40031[#40031]) * Sparse role queries can throw an NPE {pull}45053[#45053] @@ -376,6 +380,7 @@ Features/Ingest:: * Allow dropping documents with auto-generated ID {pull}46773[#46773] (issue: {issue}46678[#46678]) Features/Java High Level REST Client:: +* [HLRC] Send min_score as query string parameter to the count API {pull}46829[#46829] (issue: {issue}46474[#46474]) * HLRC multisearchTemplate forgot params {pull}46492[#46492] (issue: {issue}46488[#46488]) * terminateAfter added to the RequestConverter {pull}46474[#46474] (issue: {issue}46446[#46446]) * [Closes #44045] Added 'slices' parameter when submitting reindex request via Java high level REST client {pull}45690[#45690] (issue: {issue}44045[#44045]) @@ -425,6 +430,7 @@ Infra/Settings:: * bug fix about elasticsearch.common.settings.Settings.processSetting {pull}44047[#44047] (issue: {issue}43791[#43791]) Machine Learning:: +* [ML] fix two datafeed flush lockup bugs {pull}46982[#46982] * [ML] Throw an error when a datafeed needs CCS but it is not enabled for the node {pull}46044[#46044] (issue: {issue}46025[#46025]) * Handle "null" value of Estimate memory usage API response gracefully. 
{pull}45726[#45726] (issue: {issue}44699[#44699]) * [ML] Remove timeout on waiting for DF analytics result processor to complete {pull}45724[#45724] (issue: {issue}45723[#45723]) @@ -489,6 +495,8 @@ Security:: * Use system context for looking up connected nodes {pull}43991[#43991] (issue: {issue}43974[#43974]) Snapshot/Restore:: +* Fix Bug in Snapshot Status Response Timestamps {pull}46919[#46919] (issue: {issue}46913[#46913]) +* GCS deleteBlobsIgnoringIfNotExists should catch StorageException {pull}46832[#46832] (issue: {issue}46772[#46772]) * Fix TransportSnapshotsStatusAction ThreadPool Use {pull}45824[#45824] * Stop Executing SLM Policy Transport Action on Snapshot Pool {pull}45727[#45727] (issue: {issue}45594[#45594]) * Check again on-going snapshots/restores of indices before closing {pull}43873[#43873] From a46eef9634ada3bbe111aa13b24ecd05fb0c8e9f Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Wed, 25 Sep 2019 11:32:08 -0600 Subject: [PATCH 63/94] Change SLM stats format (#46991) Using arrays of objects with embedded IDs is preferred for new APIs over using entity IDs as JSON keys. This commit changes the SLM stats API to use the preferred format. --- .../client/slm/SnapshotLifecycleStats.java | 23 ++++++--- docs/reference/ilm/apis/slm-api.asciidoc | 10 ++-- .../xpack/slm/SnapshotLifecycleStats.java | 51 +++++++++++-------- .../xpack/slm/SnapshotLifecycleRestIT.java | 19 +++++-- 4 files changed, 68 insertions(+), 35 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java index fc54f74649b..6a8eb323030 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java @@ -74,7 +74,7 @@ public class SnapshotLifecycleStats implements ToXContentObject { PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS); - PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS); } // Package visible for testing @@ -178,22 +178,25 @@ public class SnapshotLifecycleStats implements ToXContentObject { private final long snapshotsDeleted; private final long snapshotDeleteFailures; + public static final ParseField POLICY_ID = new ParseField("policy"); static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed"); static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted"); static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures"); - private static final ConstructingObjectParser PARSER = + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("snapshot_policy_stats", true, - (a, id) -> { - long taken = (long) a[0]; - long failed = (long) a[1]; - long deleted = (long) a[2]; - long deleteFailed = (long) a[3]; + a -> { + String id = (String) a[0]; + long taken = (long) a[1]; + long failed = (long) a[2]; + long deleted = (long) a[3]; + long deleteFailed = 
(long) a[4]; return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed); }); static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED); @@ -209,7 +212,11 @@ public class SnapshotLifecycleStats implements ToXContentObject { } public static SnapshotPolicyStats parse(XContentParser parser, String policyId) { - return PARSER.apply(parser, policyId); + return PARSER.apply(parser, null); + } + + public String getPolicyId() { + return policyId; } public long getSnapshotsTaken() { diff --git a/docs/reference/ilm/apis/slm-api.asciidoc b/docs/reference/ilm/apis/slm-api.asciidoc index 59c1601ab9b..cd9d2364e69 100644 --- a/docs/reference/ilm/apis/slm-api.asciidoc +++ b/docs/reference/ilm/apis/slm-api.asciidoc @@ -142,6 +142,7 @@ The output looks similar to the following: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 0, "snapshots_failed": 0, "snapshots_deleted": 0, @@ -231,6 +232,7 @@ Which, in this case shows an error because the index did not exist: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 0, "snapshots_failed": 1, "snapshots_deleted": 0, @@ -319,6 +321,7 @@ Which now includes the successful snapshot information: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 1, "snapshots_failed": 1, "snapshots_deleted": 0, @@ -371,14 +374,15 @@ Which returns a response similar to: "retention_timed_out": 0, "retention_deletion_time": "1.4s", "retention_deletion_time_millis": 1404, - "policy_metrics": { - "daily-snapshots": { + "policy_metrics": [ + { + "policy": "daily-snapshots", "snapshots_taken": 1, "snapshots_failed": 1, "snapshots_deleted": 0, "snapshot_deletion_failures": 0 } - }, + ], "total_snapshots_taken": 1, "total_snapshots_failed": 1, "total_snapshots_deleted": 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java index fa018abc6c4..7b401cb4025 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -71,7 +72,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS); - PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS); } public SnapshotLifecycleStats() { @@ -213,23 +214,25 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { builder.field(RETENTION_TIME.getPreferredName(), retentionTime); 
builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis()); - Map metrics = getMetrics(); - long totalTaken = metrics.values().stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); - long totalFailed = metrics.values().stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); - long totalDeleted = metrics.values().stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); - long totalDeleteFailures = metrics.values().stream().mapToLong(s -> s.snapshotDeleteFailures.count()).sum(); + List metrics = getMetrics().values().stream() + .sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing + .collect(Collectors.toList()); + long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); + long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); + long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); + long totalDeleteFailures = metrics.stream().mapToLong(s -> s.snapshotDeleteFailures.count()).sum(); builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken); builder.field(TOTAL_FAILED.getPreferredName(), totalFailed); builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted); builder.field(TOTAL_DELETION_FAILURES.getPreferredName(), totalDeleteFailures); - builder.startObject(POLICY_STATS.getPreferredName()); - for (Map.Entry policy : metrics.entrySet()) { - SnapshotPolicyStats perPolicyMetrics = policy.getValue(); - builder.startObject(perPolicyMetrics.policyId); - perPolicyMetrics.toXContent(builder, params); + + builder.startArray(POLICY_STATS.getPreferredName()); + for (SnapshotPolicyStats stats : metrics) { + builder.startObject(); + stats.toXContent(builder, params); builder.endObject(); } - builder.endObject(); + builder.endArray(); builder.endObject(); return builder; } @@ -268,22 +271,25 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { private final CounterMetric snapshotsDeleted = new CounterMetric(); private final CounterMetric snapshotDeleteFailures = new CounterMetric(); + public static final ParseField POLICY_ID = new ParseField("policy"); public static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); public static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed"); public static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted"); public static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures"); - private static final ConstructingObjectParser PARSER = + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("snapshot_policy_stats", true, - (a, id) -> { - long taken = (long) a[0]; - long failed = (long) a[1]; - long deleted = (long) a[2]; - long deleteFailed = (long) a[3]; + a -> { + String id = (String) a[0]; + long taken = (long) a[1]; + long failed = (long) a[2]; + long deleted = (long) a[3]; + long deleteFailed = (long) a[4]; return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed); }); static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED); @@ -310,8 +316,8 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { 
this.snapshotDeleteFailures.inc(in.readVLong()); } - public static SnapshotPolicyStats parse(XContentParser parser, String policyId) { - return PARSER.apply(parser, policyId); + public static SnapshotPolicyStats parse(XContentParser parser) { + return PARSER.apply(parser, null); } public SnapshotPolicyStats merge(SnapshotPolicyStats other) { @@ -339,6 +345,10 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { snapshotDeleteFailures.inc(); } + public String getPolicyId() { + return policyId; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(policyId); @@ -372,6 +382,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(SnapshotPolicyStats.POLICY_ID.getPreferredName(), policyId); builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken.count()); builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed.count()); builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), snapshotsDeleted.count()); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java index 21874386f55..2383e072272 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java @@ -41,6 +41,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.slm.history.SnapshotHistoryItem.CREATE_OPERATION; @@ -131,7 +133,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, true, repoId, CREATE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int totalTaken = (int) stats.get(SnapshotLifecycleStats.TOTAL_TAKEN.getPreferredName()); @@ -180,7 +182,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, false, repoName, CREATE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsFailed = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName()); int totalFailed = (int) stats.get(SnapshotLifecycleStats.TOTAL_FAILED.getPreferredName()); @@ -229,7 +231,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { } Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) 
policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int totalTaken = (int) stats.get(SnapshotLifecycleStats.TOTAL_TAKEN.getPreferredName()); @@ -301,7 +303,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, true, repoId, DELETE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int snapsDeleted = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName()); @@ -482,4 +484,13 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { request.setJsonEntity(Strings.toString(document)); assertOK(client.performRequest(request)); } + + @SuppressWarnings("unchecked") + private static Map policyStatsAsMap(Map stats) { + return ((List>) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName())) + .stream() + .collect(Collectors.toMap( + m -> (String) m.get(SnapshotLifecycleStats.SnapshotPolicyStats.POLICY_ID.getPreferredName()), + Function.identity())); + } } From d0495b8f933195bd16ff621e141449cd20a341f5 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 25 Sep 2019 11:19:39 -0700 Subject: [PATCH 64/94] Track enabled test task candidate class files as task input (#47054) --- .../precommit/TestingConventionsTasks.java | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java index 95ad323ceda..2f5028315a4 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java @@ -70,19 +70,13 @@ public class TestingConventionsTasks extends DefaultTask { } @Input - public Map> classFilesPerEnabledTask(FileTree testClassFiles) { - Map> collector = new HashMap<>(); - - // Gradle Test - collector.putAll( - getProject().getTasks().withType(Test.class).stream() - .filter(Task::getEnabled) - .collect(Collectors.toMap( - Task::getPath, - task -> task.getCandidateClassFiles().getFiles() - )) - ); - return Collections.unmodifiableMap(collector); + public Map> getClassFilesPerEnabledTask() { + return getProject().getTasks().withType(Test.class).stream() + .filter(Task::getEnabled) + .collect(Collectors.toMap( + Task::getPath, + task -> task.getCandidateClassFiles().getFiles() + )); } @Input @@ -154,7 +148,7 @@ public class TestingConventionsTasks extends DefaultTask { .collect(Collectors.toList()) ).getAsFileTree(); - final Map> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles); + final Map> classFilesPerTask = getClassFilesPerEnabledTask(); final Map>> testClassesPerTask = classFilesPerTask.entrySet().stream() .collect( From 287d96d1a11ea3dcb324033f85ed580d40b8f09d Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 25 Sep 2019 13:02:28 -0700 Subject: [PATCH 65/94] [DOCS] Fix links to transform pages (#47134) --- docs/reference/transform/apis/transformresource.asciidoc | 6 ++---- docs/reference/transform/index.asciidoc | 1 + 2 files changed, 3 insertions(+), 4 deletions(-) diff --git 
a/docs/reference/transform/apis/transformresource.asciidoc b/docs/reference/transform/apis/transformresource.asciidoc
index 55b2095a6ec..190f827cd8e 100644
--- a/docs/reference/transform/apis/transformresource.asciidoc
+++ b/docs/reference/transform/apis/transformresource.asciidoc
@@ -5,8 +5,7 @@
{transform-cap} resources relate to the <>.

-For more information, see
-{stack-ov}/ecommerce-dataframes.html[Transforming your data with {dataframes}].
+For more information, see <>.

[discrete]
[[transform-properties]]
@@ -101,8 +100,7 @@ pivot function `group by` fields and the aggregation to reduce the data.
* {ref}/search-aggregations-pipeline-bucket-selector-aggregation.html[Bucket Selector]

IMPORTANT: {transforms-cap} support a subset of the functionality in
-composite aggregations. See
-{stack-ov}/dataframe-limitations.html[{dataframe-cap} limitations].
+composite aggregations. See <>.

--

diff --git a/docs/reference/transform/index.asciidoc b/docs/reference/transform/index.asciidoc
index 25a7f3dd407..595cbdef56d 100644
--- a/docs/reference/transform/index.asciidoc
+++ b/docs/reference/transform/index.asciidoc
@@ -13,6 +13,7 @@ your data.
* <>
* <>
* <>
+* <>
* <>
* <>
* <>

From a267df30fa4d427d84e426679f91735556e67b98 Mon Sep 17 00:00:00 2001
From: Lee Hinman
Date: Wed, 25 Sep 2019 14:24:54 -0600
Subject: [PATCH 66/94] Wait for snapshot completion in SLM snapshot invocation (#47051)

* Wait for snapshot completion in SLM snapshot invocation

This changes the snapshots internally invoked by SLM to wait for completion. This allows us to capture more snapshotting failure scenarios. For example, previously a snapshot would be created and then registered as a "success"; however, the snapshot may have been aborted, or it may have had a subset of its shards fail. These cases are now handled by inspecting the response to the `CreateSnapshotRequest` and ensuring that there are no failures. If any failures are present, the history store now stores the action as a failure instead of a success.
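Condensed, the check this commit introduces looks roughly like the sketch below; the names mirror the diff that follows, but the standalone wrapper method is illustrative rather than the actual implementation.

[source,java]
--------------------------------
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.snapshots.SnapshotException;
import org.elasticsearch.snapshots.SnapshotInfo;

class SnapshotResultCheckSketch {
    /** Returns null on full success, otherwise an exception aggregating the shard failures. */
    static SnapshotException checkForFailures(String repository, String snapshot, CreateSnapshotResponse response) {
        SnapshotInfo info = response.getSnapshotInfo();
        if (info.failedShards() == 0) {
            return null; // every shard completed; record the run as a success
        }
        SnapshotException e = new SnapshotException(repository, snapshot,
            "failed to create snapshot successfully, " + info.failedShards()
                + " out of " + info.totalShards() + " total shards failed");
        // Attach each shard-level cause so the history entry explains what failed
        info.shardFailures().forEach(failure -> e.addSuppressed(failure.getCause()));
        return e; // caller records the run as a failure
    }
}
--------------------------------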
Relates to #38461 and #43663 --- .../create/CreateSnapshotResponse.java | 2 +- .../core/slm/SnapshotLifecyclePolicy.java | 2 +- .../xpack/ilm/IndexLifecycle.java | 3 +- .../xpack/slm/SnapshotLifecycleTask.java | 30 +++++-- .../xpack/slm/SnapshotRetentionTask.java | 15 ++-- .../xpack/slm/SnapshotLifecycleTaskTests.java | 82 +++++++++++++++++++ 6 files changed, 119 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index a63f2cf8104..b16959e9d18 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -53,7 +53,7 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent CreateSnapshotResponse() {} - CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { + public CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { this.snapshotInfo = snapshotInfo; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java index e038d3bb6e3..0ef7912c580 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java @@ -260,7 +260,7 @@ public class SnapshotLifecyclePolicy extends AbstractDiffable mergedConfiguration = new HashMap<>(configuration); mergedConfiguration.put("metadata", metadataWithAddedPolicyName); req.source(mergedConfiguration); - req.waitForCompletion(false); + req.waitForCompletion(true); return req; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index 2fc2033ab44..ebf7c3b4282 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -158,7 +158,8 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE_SETTING, RolloverAction.LIFECYCLE_ROLLOVER_ALIAS_SETTING, LifecycleSettings.SLM_HISTORY_INDEX_ENABLED_SETTING, - LifecycleSettings.SLM_RETENTION_SCHEDULE_SETTING); + LifecycleSettings.SLM_RETENTION_SCHEDULE_SETTING, + LifecycleSettings.SLM_RETENTION_DURATION_SETTING); } @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java index 4c740f42786..ec3baaf2be5 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.xpack.core.ClientHelper; import 
org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.slm.SnapshotInvocationRecord; @@ -91,16 +93,32 @@ public class SnapshotLifecycleTask implements SchedulerEngine.Listener { public void onResponse(CreateSnapshotResponse createSnapshotResponse) { logger.debug("snapshot response for [{}]: {}", policyMetadata.getPolicy().getId(), Strings.toString(createSnapshotResponse)); - final long timestamp = Instant.now().toEpochMilli(); - clusterService.submitStateUpdateTask("slm-record-success-" + policyMetadata.getPolicy().getId(), - WriteJobStatus.success(policyMetadata.getPolicy().getId(), request.snapshot(), timestamp)); - historyStore.putAsync(SnapshotHistoryItem.creationSuccessRecord(timestamp, policyMetadata.getPolicy(), - request.snapshot())); + final SnapshotInfo snapInfo = createSnapshotResponse.getSnapshotInfo(); + + // Check that there are no failed shards, since the request may not entirely + // fail, but may still have failures (such as in the case of an aborted snapshot) + if (snapInfo.failedShards() == 0) { + final long timestamp = Instant.now().toEpochMilli(); + clusterService.submitStateUpdateTask("slm-record-success-" + policyMetadata.getPolicy().getId(), + WriteJobStatus.success(policyMetadata.getPolicy().getId(), request.snapshot(), timestamp)); + historyStore.putAsync(SnapshotHistoryItem.creationSuccessRecord(timestamp, policyMetadata.getPolicy(), + request.snapshot())); + } else { + int failures = snapInfo.failedShards(); + int total = snapInfo.totalShards(); + final SnapshotException e = new SnapshotException(request.repository(), request.snapshot(), + "failed to create snapshot successfully, " + failures + " out of " + total + " total shards failed"); + // Add each failed shard's exception as suppressed, the exception contains + // information about which shard failed + snapInfo.shardFailures().forEach(failure -> e.addSuppressed(failure.getCause())); + // Call the failure handler to register this as a failure and persist it + onFailure(e); + } } @Override public void onFailure(Exception e) { - logger.error("failed to issue create snapshot request for snapshot lifecycle policy [{}]: {}", + logger.error("failed to create snapshot for snapshot lifecycle policy [{}]: {}", policyMetadata.getPolicy().getId(), e); final long timestamp = Instant.now().toEpochMilli(); clusterService.submitStateUpdateTask("slm-record-failure-" + policyMetadata.getPolicy().getId(), diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java index 368dbcae678..f09b793a1a6 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java @@ -339,6 +339,7 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { List snapshots = entry.getValue(); for (SnapshotInfo info : snapshots) { final String policyId = getPolicyId(info); + final long deleteStartTime = nowNanoSupplier.getAsLong(); deleteSnapshot(policyId, repo, info.snapshotId(), slmStats, ActionListener.wrap(acknowledgedResponse -> { deleted.incrementAndGet(); if (acknowledgedResponse.isAcknowledged()) { @@ -364,13 +365,15 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { })); // Check whether we have exceeded the maximum time allowed to spend deleting // snapshots, if we have, short-circuit the rest of the deletions - 
TimeValue elapsedDeletionTime = TimeValue.timeValueNanos(nowNanoSupplier.getAsLong() - startTime); - logger.debug("elapsed time for deletion of [{}] snapshot: {}", info.snapshotId(), elapsedDeletionTime); - if (elapsedDeletionTime.compareTo(maximumTime) > 0) { + long finishTime = nowNanoSupplier.getAsLong(); + TimeValue deletionTime = TimeValue.timeValueNanos(finishTime - deleteStartTime); + logger.debug("elapsed time for deletion of [{}] snapshot: {}", info.snapshotId(), deletionTime); + TimeValue totalDeletionTime = TimeValue.timeValueNanos(finishTime - startTime); + if (totalDeletionTime.compareTo(maximumTime) > 0) { logger.info("maximum snapshot retention deletion time reached, time spent: [{}]," + " maximum allowed time: [{}], deleted [{}] out of [{}] snapshots scheduled for deletion, failed to delete [{}]", - elapsedDeletionTime, maximumTime, deleted, count, failed); - slmStats.deletionTime(elapsedDeletionTime); + totalDeletionTime, maximumTime, deleted, count, failed); + slmStats.deletionTime(totalDeletionTime); slmStats.retentionTimedOut(); return; } @@ -402,8 +405,8 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { } else { logger.warn("[{}] snapshot [{}] delete issued but the request was not acknowledged", repo, snapshot); } - listener.onResponse(acknowledgedResponse); slmStats.snapshotDeleted(slmPolicy); + listener.onResponse(acknowledgedResponse); } @Override diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java index 84c1d12cce6..5474602cdfd 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java @@ -23,6 +23,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -47,6 +51,7 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; @@ -196,6 +201,83 @@ public class SnapshotLifecycleTaskTests extends ESTestCase { threadPool.shutdownNow(); } + public void testPartialFailureSnapshot() throws Exception { + final String id = randomAlphaOfLength(4); + final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id); + final SnapshotLifecycleMetadata meta = + new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm), OperationMode.RUNNING, new SnapshotLifecycleStats()); + + final ClusterState state = ClusterState.builder(new ClusterName("test")) + .metaData(MetaData.builder() + .putCustom(SnapshotLifecycleMetadata.TYPE, meta) + .build()) + .build(); + + final ThreadPool threadPool = new TestThreadPool("test"); + final AtomicBoolean clientCalled = new AtomicBoolean(false); + final SetOnce snapshotName = new 
SetOnce<>(); + try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool); + VerifyingClient client = new VerifyingClient(threadPool, + (action, request, listener) -> { + assertFalse(clientCalled.getAndSet(true)); + assertThat(action, instanceOf(CreateSnapshotAction.class)); + assertThat(request, instanceOf(CreateSnapshotRequest.class)); + + CreateSnapshotRequest req = (CreateSnapshotRequest) request; + + SnapshotLifecyclePolicy policy = slpm.getPolicy(); + assertThat(req.snapshot(), startsWith(policy.getName() + "-")); + assertThat(req.repository(), equalTo(policy.getRepository())); + snapshotName.set(req.snapshot()); + if (req.indices().length > 0) { + assertThat(Arrays.asList(req.indices()), equalTo(policy.getConfig().get("indices"))); + } + boolean globalState = policy.getConfig().get("include_global_state") == null || + Boolean.parseBoolean((String) policy.getConfig().get("include_global_state")); + assertThat(req.includeGlobalState(), equalTo(globalState)); + + return new CreateSnapshotResponse( + new SnapshotInfo( + new SnapshotId(req.snapshot(), "uuid"), + Arrays.asList(req.indices()), + randomNonNegativeLong(), + "snapshot started", + randomNonNegativeLong(), + 3, + Collections.singletonList( + new SnapshotShardFailure("nodeId", new ShardId("index", "uuid", 0), "forced failure")), + req.includeGlobalState(), + req.userMetadata() + )); + })) { + final AtomicBoolean historyStoreCalled = new AtomicBoolean(false); + SnapshotHistoryStore historyStore = new VerifyingHistoryStore(null, ZoneOffset.UTC, + item -> { + assertFalse(historyStoreCalled.getAndSet(true)); + final SnapshotLifecyclePolicy policy = slpm.getPolicy(); + assertEquals(policy.getId(), item.getPolicyId()); + assertEquals(policy.getRepository(), item.getRepository()); + assertEquals(policy.getConfig(), item.getSnapshotConfiguration()); + assertEquals(snapshotName.get(), item.getSnapshotName()); + assertFalse("item should be a failure", item.isSuccess()); + assertThat(item.getErrorDetails(), + containsString("failed to create snapshot successfully, 1 out of 3 total shards failed")); + assertThat(item.getErrorDetails(), + containsString("forced failure")); + }); + + SnapshotLifecycleTask task = new SnapshotLifecycleTask(client, clusterService, historyStore); + // Trigger the event with a matching job name for the policy + task.triggered(new SchedulerEngine.Event(SnapshotLifecycleService.getJobId(slpm), + System.currentTimeMillis(), System.currentTimeMillis())); + + assertTrue("snapshot should be triggered once", clientCalled.get()); + assertTrue("history store should be called once", historyStoreCalled.get()); + } + + threadPool.shutdownNow(); + } + /** * A client that delegates to a verifying function for action/request/listener */ From 27520cac3ba8360d5d40a44bcd0a5b4074f7ef53 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 25 Sep 2019 21:44:16 +0100 Subject: [PATCH 67/94] ILM: parse origination date from index name (#46755) (#47124) * ILM: parse origination date from index name (#46755) Introduce the `index.lifecycle.parse_origination_date` setting that indicates if the origination date should be parsed from the index name. If set to `true`, an index whose name doesn't match the expected format (namely `indexName-{dateFormat}-optional_digits`) will fail before being created. The origination date will be parsed when initialising a lifecycle for an index and it will be set as the `index.lifecycle.origination_date` for that index.
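For illustration only, here is a minimal standalone sketch of that naming rule (the class and method names below are made up for the example; the real implementation is the `IndexLifecycleOriginationDateParser` introduced by this patch):

    import java.time.LocalDate;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class OriginationDateSketch {
        // Assumed convention from the description above: "indexName-yyyy.MM.dd"
        // with an optional trailing "-digits" (e.g. the counter added by rollover).
        private static final Pattern NAME_PATTERN =
            Pattern.compile("^.*-(\\d{4}\\.\\d{2}\\.\\d{2})(-\\d+)?$");
        private static final DateTimeFormatter DATE_FORMAT =
            DateTimeFormatter.ofPattern("yyyy.MM.dd");

        static long parseOriginationDate(String indexName) {
            Matcher matcher = NAME_PATTERN.matcher(indexName);
            if (matcher.matches() == false) {
                throw new IllegalArgumentException(
                    "index name [" + indexName + "] does not match the expected pattern");
            }
            // Interpret the extracted date as midnight UTC, in epoch milliseconds.
            return LocalDate.parse(matcher.group(1), DATE_FORMAT)
                .atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        }

        public static void main(String[] args) {
            System.out.println(parseOriginationDate("logs-2016.10.31-000002"));
        }
    }

Run against the rollover-style name `logs-2016.10.31-000002`, the sketch prints 1477872000000 (2016-10-31T00:00:00Z), which is the kind of value that ends up in `index.lifecycle.origination_date`.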
A user-set value for `index.lifecycle.origination_date` will always override any date parsed from the index name. (cherry picked from commit c363d27f0210733dad0c307d54fa224a92ddb569) Signed-off-by: Andrei Dan * Drop usage of Map.of to be Java 8 compliant --- docs/reference/settings/ilm-settings.asciidoc | 8 ++ .../IndexLifecycleOriginationDateParser.java | 50 ++++++++ .../core/ilm/InitializePolicyContextStep.java | 24 +++- .../xpack/core/ilm/LifecycleSettings.java | 3 + ...exLifecycleOriginationDateParserTests.java | 112 ++++++++++++++++++ .../xpack/ilm/IndexLifecycle.java | 10 ++ .../xpack/ilm/IndexLifecycleService.java | 14 ++- .../IndexLifecycleInitialisationTests.java | 86 +++++++++++++- .../xpack/ilm/IndexLifecycleServiceTests.java | 18 +++ 9 files changed, 315 insertions(+), 10 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java diff --git a/docs/reference/settings/ilm-settings.asciidoc b/docs/reference/settings/ilm-settings.asciidoc index 80c20b59b2b..97ae65d1815 100644 --- a/docs/reference/settings/ilm-settings.asciidoc +++ b/docs/reference/settings/ilm-settings.asciidoc @@ -27,6 +27,14 @@ information about rollover, see <>. (<>) How often {ilm} checks for indices that meet policy criteria. Defaults to `10m`. +`index.lifecycle.parse_origination_date`:: +When configured to `true` the origination date will be parsed from the index +name. The index format must match the pattern `^.*-{date_format}-\\d+`, where +the `date_format` is `yyyy.MM.dd` and the trailing digits are optional (an +index that was rolled over would normally match the full format eg. +`logs-2016.10.31-000002`). If the index name doesn't match the pattern +the index creation will fail. + `index.lifecycle.origination_date`:: The timestamp that will be used to calculate the index age for its phase transitions. This allows the users to create an index containing old data and diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java new file mode 100644 index 00000000000..05b362d733e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_ORIGINATION_DATE; +import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE; + +public class IndexLifecycleOriginationDateParser { + + private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("yyyy.MM.dd"); + private static final String INDEX_NAME_REGEX = "^.*-(\\d{4}.\\d{2}.\\d{2})(-[\\d]+)?$"; + private static final Pattern INDEX_NAME_PATTERN = Pattern.compile(INDEX_NAME_REGEX); + + /** + * Determines if the origination date needs to be parsed from the index name. + */ + public static boolean shouldParseIndexName(Settings indexSettings) { + return indexSettings.getAsLong(LIFECYCLE_ORIGINATION_DATE, -1L) == -1L && + indexSettings.getAsBoolean(LIFECYCLE_PARSE_ORIGINATION_DATE, false); + } + + /** + * Parses the index according to the supported format and extracts the origination date. If the index does not match the expected + * format or the date in the index name doesn't match the `yyyy.MM.dd` format it throws an {@link IllegalArgumentException} + */ + public static long parseIndexNameAndExtractDate(String indexName) { + Matcher matcher = INDEX_NAME_PATTERN.matcher(indexName); + if (matcher.matches()) { + String dateAsString = matcher.group(1); + try { + return DATE_FORMATTER.parseMillis(dateAsString); + } catch (ElasticsearchParseException | IllegalArgumentException e) { + throw new IllegalArgumentException("index name [" + indexName + "] contains date [" + dateAsString + "] which " + + "couldn't be parsed using the 'yyyy.MM.dd' format", e); + } + } + + throw new IllegalArgumentException("index name [" + indexName + "] does not match pattern '" + INDEX_NAME_REGEX + "'"); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java index c0ba7ba5411..3e7ad7a6a07 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java @@ -10,8 +10,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; import static org.elasticsearch.xpack.core.ilm.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; /** @@ -34,19 +37,34 @@ public final class InitializePolicyContextStep extends ClusterStateActionStep { // Index must have been since deleted, ignore it return clusterState; } + LifecycleExecutionState lifecycleState = LifecycleExecutionState .fromIndexMetadata(indexMetaData); + if (lifecycleState.getLifecycleDate() != null) { return clusterState; } + IndexMetaData.Builder indexMetadataBuilder = IndexMetaData.builder(indexMetaData); + if 
(shouldParseIndexName(indexMetaData.getSettings())) { + long parsedOriginationDate = parseIndexNameAndExtractDate(index.getName()); + indexMetadataBuilder.settingsVersion(indexMetaData.getSettingsVersion() + 1) + .settings(Settings.builder() + .put(indexMetaData.getSettings()) + .put(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, parsedOriginationDate) + .build() + ); + } + ClusterState.Builder newClusterStateBuilder = ClusterState.builder(clusterState); LifecycleExecutionState.Builder newCustomData = LifecycleExecutionState.builder(lifecycleState); newCustomData.setIndexCreationDate(indexMetaData.getCreationDate()); - newClusterStateBuilder.metaData(MetaData.builder(clusterState.getMetaData()).put(IndexMetaData - .builder(indexMetaData) - .putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap()))); + indexMetadataBuilder.putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap()); + + newClusterStateBuilder.metaData( + MetaData.builder(clusterState.getMetaData()).put(indexMetadataBuilder) + ); return newClusterStateBuilder.build(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java index bec63ed654f..5607bfc744e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java @@ -18,6 +18,7 @@ public class LifecycleSettings { public static final String LIFECYCLE_NAME = "index.lifecycle.name"; public static final String LIFECYCLE_INDEXING_COMPLETE = "index.lifecycle.indexing_complete"; public static final String LIFECYCLE_ORIGINATION_DATE = "index.lifecycle.origination_date"; + public static final String LIFECYCLE_PARSE_ORIGINATION_DATE = "index.lifecycle.parse_origination_date"; public static final String SLM_HISTORY_INDEX_ENABLED = "slm.history_index_enabled"; public static final String SLM_RETENTION_SCHEDULE = "slm.retention_schedule"; @@ -32,6 +33,8 @@ public class LifecycleSettings { Setting.Property.Dynamic, Setting.Property.IndexScope); public static final Setting LIFECYCLE_ORIGINATION_DATE_SETTING = Setting.longSetting(LIFECYCLE_ORIGINATION_DATE, -1, -1, Setting.Property.Dynamic, Setting.Property.IndexScope); + public static final Setting LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING = Setting.boolSetting(LIFECYCLE_PARSE_ORIGINATION_DATE, + false, Setting.Property.Dynamic, Setting.Property.IndexScope); public static final Setting SLM_HISTORY_INDEX_ENABLED_SETTING = Setting.boolSetting(SLM_HISTORY_INDEX_ENABLED, true, Setting.Property.NodeScope); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java new file mode 100644 index 00000000000..b13d487f9a5 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Locale; +import java.util.TimeZone; + +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; +import static org.hamcrest.Matchers.is; + +public class IndexLifecycleOriginationDateParserTests extends ESTestCase { + + public void testShouldParseIndexNameReturnsFalseWhenOriginationDateIsSet() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, 1L) + .build(); + assertThat(shouldParseIndexName(settings), is(false)); + } + + public void testShouldParseIndexNameReturnsFalseIfParseOriginationDateIsDisabled() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, false) + .build(); + assertThat(shouldParseIndexName(settings), is(false)); + } + + public void testShouldParseIndexNameReturnsTrueIfParseOriginationDateIsTrueAndOriginationDateIsNotSet() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true) + .build(); + assertThat(shouldParseIndexName(settings), is(true)); + } + + public void testParseIndexNameThatMatchesExpectedFormat() throws ParseException { + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd", Locale.getDefault()); + dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + long expectedDate = dateFormat.parse("2019.09.04").getTime(); + + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04"); + assertThat("indexName-yyyy.MM.dd is a valid index format", parsedDate, is(expectedDate)); + } + + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04-0000001"); + assertThat("indexName-yyyy.MM.dd-\\d+$ is a valid index format", parsedDate, is(expectedDate)); + } + + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04-2019.09.24"); + long secondDateInIndexName = dateFormat.parse("2019.09.24").getTime(); + assertThat("indexName-yyyy.MM.dd-yyyy.MM.dd is a valid index format and the second date should be parsed", + parsedDate, is(secondDateInIndexName)); + } + + { + long parsedDate = parseIndexNameAndExtractDate("index-2019.09.04-2019.09.24-00002"); + long secondDateInIndexName = dateFormat.parse("2019.09.24").getTime(); + assertThat("indexName-yyyy.MM.dd-yyyy.MM.dd-digits is a valid index format and the second date should be parsed", + parsedDate, is(secondDateInIndexName)); + } + } + + public void testParseIndexNameThrowsIllegalArgumentExceptionForInvalidIndexFormat() { + expectThrows( + IllegalArgumentException.class, + "plainIndexName does not match the expected pattern", + () -> parseIndexNameAndExtractDate("plainIndexName") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName--00001 does not match the expected pattern as the origination date is missing", + () -> parseIndexNameAndExtractDate("indexName--00001") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName-00001 does not match the expected pattern as the origination date is missing", + () -> parseIndexNameAndExtractDate("indexName-00001") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName_2019.09.04_00001 does not match the expected pattern as _ is not the expected delimiter", 
+ () -> parseIndexNameAndExtractDate("indexName_2019.09.04_00001") + ); + } + + public void testParseIndexNameThrowsIllegalArgumentExceptionForInvalidDateFormat() { + expectThrows( + IllegalArgumentException.class, + "indexName-2019.04-00001 does not match the expected pattern as the date does not conform with the yyyy.MM.dd pattern", + () -> parseIndexNameAndExtractDate("indexName-2019.04-00001") + ); + + expectThrows( + IllegalArgumentException.class, + "java.lang.IllegalArgumentException: failed to parse date field [2019.09.44] with format [yyyy.MM.dd]", + () -> parseIndexNameAndExtractDate("index-2019.09.44") + ); + } +} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index ebf7c3b4282..a236074e83d 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; @@ -155,6 +156,7 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING, LifecycleSettings.LIFECYCLE_NAME_SETTING, LifecycleSettings.LIFECYCLE_ORIGINATION_DATE_SETTING, + LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING, LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE_SETTING, RolloverAction.LIFECYCLE_ROLLOVER_ALIAS_SETTING, LifecycleSettings.SLM_HISTORY_INDEX_ENABLED_SETTING, @@ -280,6 +282,14 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { return actions; } + @Override + public void onIndexModule(IndexModule indexModule) { + if (ilmEnabled) { + assert indexLifecycleInitialisationService.get() != null; + indexModule.addIndexEventListener(indexLifecycleInitialisationService.get()); + } + } + @Override public void close() { try { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java index 22f8bb746e8..1cf860bf5c7 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.component.Lifecycle.State; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; @@ -39,11 +41,14 @@ import java.util.Collections; import java.util.Set; import java.util.function.LongSupplier; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; + /** * A service which runs the {@link 
LifecyclePolicy}s associated with indexes. */ public class IndexLifecycleService - implements ClusterStateListener, ClusterStateApplier, SchedulerEngine.Listener, Closeable, LocalNodeMasterListener { + implements ClusterStateListener, ClusterStateApplier, SchedulerEngine.Listener, Closeable, LocalNodeMasterListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(IndexLifecycleService.class); private static final Set IGNORE_STEPS_MAINTENANCE_REQUESTED = Collections.singleton(ShrinkStep.NAME); private volatile boolean isMaster = false; @@ -148,6 +153,13 @@ public class IndexLifecycleService return ThreadPool.Names.MANAGEMENT; } + @Override + public void beforeIndexAddedToCluster(Index index, Settings indexSettings) { + if (shouldParseIndexName(indexSettings)) { + parseIndexNameAndExtractDate(index.getName()); + } + } + private void updatePollInterval(TimeValue newInterval) { this.pollInterval = newInterval; maybeScheduleJob(); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index d568f643f27..694df95bee5 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -51,13 +51,17 @@ import org.elasticsearch.xpack.core.ilm.action.StopILMAction; import org.junit.Before; import java.io.IOException; +import java.text.SimpleDateFormat; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.TimeZone; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; @@ -72,6 +76,7 @@ import static org.elasticsearch.xpack.core.ilm.LifecyclePolicyTestsUtils.newLock import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.CombinableMatcher.both; @@ -229,7 +234,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), mockPhase, 1L, actualModifiedDate); assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat(indexResponse.getStep(), equalTo("observable_cluster_state_action")); assertThat(indexResponse.getPhaseExecutionInfo(), equalTo(expectedExecutionInfo)); originalLifecycleDate.set(indexResponse.getLifecycleDate()); @@ -242,7 +247,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat("The configured origination date dictates the lifecycle date", indexResponse.getLifecycleDate(), 
equalTo(1000L)); }); @@ -254,7 +259,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat("Without the origination date, the index create date should dictate the lifecycle date", indexResponse.getLifecycleDate(), equalTo(originalLifecycleDate.get())); }); @@ -267,7 +272,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), null, 1L, actualModifiedDate); assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat(indexResponse.getPhase(), equalTo(TerminalPolicyStep.COMPLETED_PHASE)); assertThat(indexResponse.getStep(), equalTo(TerminalPolicyStep.KEY.getName())); assertThat(indexResponse.getPhaseExecutionInfo(), equalTo(expectedExecutionInfo)); @@ -275,11 +280,80 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { } } - private IndexLifecycleExplainResponse executeExplainRequestAndGetTestIndexResponse() throws ExecutionException, InterruptedException { + public void testExplainParseOriginationDate() throws Exception { + // start node + logger.info("Starting server1"); + internalCluster().startNode(); + logger.info("Starting server2"); + internalCluster().startNode(); + logger.info("Creating lifecycle [test_lifecycle]"); + PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); + PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); + assertAcked(putLifecycleResponse); + + GetLifecycleAction.Response getLifecycleResponse = client().execute(GetLifecycleAction.INSTANCE, + new GetLifecycleAction.Request()).get(); + assertThat(getLifecycleResponse.getPolicies().size(), equalTo(1)); + GetLifecycleAction.LifecyclePolicyResponseItem responseItem = getLifecycleResponse.getPolicies().get(0); + assertThat(responseItem.getLifecyclePolicy(), equalTo(lifecyclePolicy)); + assertThat(responseItem.getVersion(), equalTo(1L)); + long actualModifiedDate = Instant.parse(responseItem.getModifiedDate()).toEpochMilli(); + + String indexName = "test-2019.09.14"; + logger.info("Creating index [{}]", indexName); + CreateIndexResponse createIndexResponse = + client().admin().indices().create(createIndexRequest(indexName) + .settings(Settings.builder().put(settings).put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true)) + ).actionGet(); + assertAcked(createIndexResponse); + + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd", Locale.getDefault()); + dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + long parsedLifecycleDate = dateFormat.parse("2019.09.14").getTime(); + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(parsedLifecycleDate)); + }); + + // disabling the lifecycle parsing would maintain the parsed value as that was set as the origination date + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings(Collections.singletonMap(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, 
false)).get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(parsedLifecycleDate)); + }); + + // setting the lifecycle origination date setting to null should make the lifecyle date fallback on the index creation date + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings(Collections.singletonMap(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, null)).get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(greaterThan(parsedLifecycleDate))); + }); + + // setting the lifecycle origination date to an explicit value overrides the date parsing + long originationDate = 42L; + Map settings = new HashMap<>(); + settings.put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true); + settings.put(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, originationDate); + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings(settings) + .get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(originationDate)); + }); + } + + private IndexLifecycleExplainResponse executeExplainRequestAndGetTestIndexResponse(String indexName) throws ExecutionException, + InterruptedException { ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); assertThat(explainResponse.getIndexResponses().size(), equalTo(1)); - return explainResponse.getIndexResponses().get("test"); + return explainResponse.getIndexResponses().get(indexName); } public void testMasterDedicatedDataDedicated() throws Exception { diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index 6776f13ea17..87151627b02 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -50,6 +50,7 @@ import java.time.ZoneId; import java.util.Collections; import java.util.SortedMap; import java.util.TreeMap; +import java.util.UUID; import java.util.concurrent.ExecutorService; import static org.elasticsearch.node.Node.NODE_MASTER_SETTING; @@ -305,4 +306,21 @@ public class IndexLifecycleServiceTests extends ESTestCase { indexLifecycleService.triggered(schedulerEvent); Mockito.verifyZeroInteractions(indicesClient, clusterService); } + + public void testParsingOriginationDateBeforeIndexCreation() { + Settings indexSettings = Settings.builder().put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true).build(); + Index index = new Index("invalid_index_name", UUID.randomUUID().toString()); + expectThrows(IllegalArgumentException.class, + "The parse origination date setting was configured for index " + index.getName() + + " but the index name did not match the expected format", + () -> indexLifecycleService.beforeIndexAddedToCluster(index, indexSettings) + ); + + // disabling the parsing origination date setting should prevent the validation from throwing exception + try { + indexLifecycleService.beforeIndexAddedToCluster(index, 
Settings.EMPTY); + } catch (Exception e) { + fail("Did not expect the before index validation to throw an exception as the parse origination date setting was not set"); + } + } } From 7c5a088aa58f25bf56e986c6a930611852c8ac3c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 25 Sep 2019 16:52:19 -0400 Subject: [PATCH 68/94] Increase ensureGreen timeout for testReplicaCorruption (#47136) We can have a large number of shard copies in this test. For example, the two recent failures had 24 and 27 copies respectively, and all replicas have to copy segment files because their stores are corrupted. Our CI needs more than 30 seconds to start all these copies. Note that in both recent failures, the cluster went green just after the cluster health check timed out. Closes #41899 --- .../java/org/elasticsearch/index/store/CorruptedFileIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 3ca29b6b375..a9ac976d45a 100644 --- a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -603,7 +604,7 @@ public class CorruptedFileIT extends ESIntegTestCase { Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey()) )); - ensureGreen(); + ensureGreen(TimeValue.timeValueSeconds(60)); } private int numShards(String... index) { From 4f47e1f1695afedcef6f5ade99c3fde717a86c20 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 25 Sep 2019 15:58:18 -0600 Subject: [PATCH 69/94] Extract proxy connection logic to specialized class (#47138) Currently the logic that checks whether a connection to a remote discovery node exists (and otherwise creates a proxy connection) is mixed in with the node collection, cluster connection lifecycle, and other RemoteClusterConnection logic. This commit introduces a specialized RemoteConnectionManager class which handles the open connections. Additionally, it reworks the "round-robin" proxy logic to create the list of potential connections at connection open/close time, as opposed to each time a connection is requested.
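As a rough sketch of that round-robin scheme (illustrative names only; the real implementation is the RemoteConnectionManager added below), the connection list is rebuilt copy-on-write whenever a connection opens or closes, so selecting a connection is just an atomic counter plus a floorMod over an immutable snapshot:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative stand-in for a connection handle; the real code
    // round-robins over Transport.Connection instances.
    class RoundRobinSketch<C> {
        private final AtomicLong counter = new AtomicLong();
        // Rebuilt copy-on-write when a connection opens or closes.
        private volatile List<C> connections = Collections.emptyList();

        synchronized void onOpened(C connection) {
            List<C> copy = new ArrayList<>(this.connections);
            copy.add(connection);
            this.connections = Collections.unmodifiableList(copy);
        }

        synchronized void onClosed(C connection) {
            List<C> copy = new ArrayList<>(this.connections);
            copy.remove(connection);
            this.connections = Collections.unmodifiableList(copy);
        }

        C any() {
            List<C> local = this.connections; // one consistent volatile read
            if (local.isEmpty()) {
                throw new IllegalStateException("no connections available");
            }
            long curr;
            // Skip Long.MIN_VALUE, mirroring the production loop.
            while ((curr = counter.incrementAndGet()) == Long.MIN_VALUE);
            return local.get(Math.toIntExact(Math.floorMod(curr, (long) local.size())));
        }
    }

Because any() only reads a volatile snapshot, request-time selection needs no locking; the synchronization cost is paid on the comparatively rare open/close events instead.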
--- .../coordination/FollowersChecker.java | 3 +- .../cluster/coordination/LeaderChecker.java | 3 +- .../discovery/zen/FaultDetection.java | 3 +- .../transport/ConnectionManager.java | 21 +-- .../transport/RemoteClusterConnection.java | 105 ++---------- .../transport/RemoteConnectionManager.java | 155 ++++++++++++++++++ .../TransportConnectionListener.java | 4 +- .../search/TransportSearchActionTests.java | 4 +- .../TransportClientNodesServiceTests.java | 2 +- .../discovery/PeerFinderTests.java | 9 +- .../discovery/ZenFaultDetectionTests.java | 4 +- .../zen/PublishClusterStateActionTests.java | 5 +- .../transport/ConnectionManagerTests.java | 13 +- .../transport/RemoteClusterClientTests.java | 2 +- .../RemoteClusterConnectionTests.java | 29 ++-- .../RemoteConnectionManagerTests.java | 117 +++++++++++++ .../test/transport/MockTransport.java | 2 +- .../test/transport/MockTransportService.java | 2 +- .../transport/StubbableConnectionManager.java | 15 +- .../AbstractSimpleTransportTestCase.java | 12 +- 20 files changed, 351 insertions(+), 159 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java create mode 100644 server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java index 5f166c7d3dc..22647089c01 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.discovery.zen.NodesFaultDetection; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; @@ -120,7 +121,7 @@ public class FollowersChecker { channel.sendResponse(new NodesFaultDetection.PingResponse())); transportService.addConnectionListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { handleDisconnectedNode(node); } }); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java index d1b58320fe9..62a90317176 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java @@ -37,6 +37,7 @@ import org.elasticsearch.discovery.zen.MasterFaultDetection; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -118,7 +119,7 @@ public class LeaderChecker { transportService.addConnectionListener(new TransportConnectionListener() { @Override - public void 
onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { handleDisconnectedNode(node); } }); diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java index 424dc6d31b0..cab2f0def68 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportService; @@ -97,7 +98,7 @@ public abstract class FaultDetection implements Closeable { private class FDConnectionListener implements TransportConnectionListener { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { AbstractRunnable runnable = new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java index 03a9c0f40f3..c11afa088aa 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java @@ -31,10 +31,8 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.internal.io.IOUtils; import java.io.Closeable; -import java.util.Collections; import java.util.Iterator; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -150,13 +148,13 @@ public class ConnectionManager implements Closeable { } else { logger.debug("connected to node [{}]", node); try { - connectionListener.onNodeConnected(node); + connectionListener.onNodeConnected(node, conn); } finally { final Transport.Connection finalConnection = conn; conn.addCloseListener(ActionListener.wrap(() -> { logger.trace("unregistering {} after connection close and marking as disconnected", node); connectedNodes.remove(node, finalConnection); - connectionListener.onNodeDisconnected(node); + connectionListener.onNodeDisconnected(node, conn); })); } } @@ -218,13 +216,6 @@ public class ConnectionManager implements Closeable { return connectedNodes.size(); } - /** - * Returns the set of nodes this manager is connected to. 
- */ - public Set connectedNodes() { - return Collections.unmodifiableSet(connectedNodes.keySet()); - } - @Override public void close() { internalClose(true); @@ -283,16 +274,16 @@ public class ConnectionManager implements Closeable { private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); @Override - public void onNodeDisconnected(DiscoveryNode key) { + public void onNodeDisconnected(DiscoveryNode key, Transport.Connection connection) { for (TransportConnectionListener listener : listeners) { - listener.onNodeDisconnected(key); + listener.onNodeDisconnected(key, connection); } } @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { for (TransportConnectionListener listener : listeners) { - listener.onNodeConnected(node); + listener.onNodeConnected(node, connection); } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index f8fdb342b67..0281565a7d2 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -23,7 +23,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -53,7 +52,6 @@ import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; @@ -78,7 +76,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private static final Logger logger = LogManager.getLogger(RemoteClusterConnection.class); private final TransportService transportService; - private final ConnectionManager connectionManager; + private final RemoteConnectionManager remoteConnectionManager; private final String clusterAlias; private final int maxNumRemoteConnections; private final Predicate nodePredicate; @@ -116,7 +114,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; this.clusterAlias = clusterAlias; - this.connectionManager = connectionManager; + this.remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); this.seedNodes = Collections.unmodifiableList(seedNodes); this.skipUnavailable = RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE .getConcreteSettingForNamespace(clusterAlias).get(settings); @@ -168,8 +166,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } @Override - public void onNodeDisconnected(DiscoveryNode node) { - if (connectionManager.size() < maxNumRemoteConnections) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { + if (remoteConnectionManager.size() < maxNumRemoteConnections) { // try to reconnect and fill up the slot of the disconnected node 
connectHandler.connect(ActionListener.wrap( ignore -> logger.trace("successfully connected after disconnect of {}", node), @@ -182,7 +180,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * will invoke the listener immediately. */ void ensureConnected(ActionListener voidActionListener) { - if (connectionManager.size() == 0) { + if (remoteConnectionManager.size() == 0) { connectHandler.connect(voidActionListener); } else { voidActionListener.onResponse(null); @@ -211,8 +209,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos request.clear(); request.nodes(true); request.local(true); // run this on the node that gets the request it's as good as any other - final DiscoveryNode node = getAnyConnectedNode(); - Transport.Connection connection = connectionManager.getConnection(node); + Transport.Connection connection = remoteConnectionManager.getAnyRemoteConnection(); transportService.sendRequest(connection, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, new TransportResponseHandler() { @@ -256,12 +253,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * If such node is not connected, the returned connection will be a proxy connection that redirects to it. */ Transport.Connection getConnection(DiscoveryNode remoteClusterNode) { - if (connectionManager.nodeConnected(remoteClusterNode)) { - return connectionManager.getConnection(remoteClusterNode); - } - DiscoveryNode discoveryNode = getAnyConnectedNode(); - Transport.Connection connection = connectionManager.getConnection(discoveryNode); - return new ProxyConnection(connection, remoteClusterNode); + return remoteConnectionManager.getRemoteConnection(remoteClusterNode); } private Predicate getRemoteClusterNamePredicate() { @@ -280,67 +272,19 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos }; } - - static final class ProxyConnection implements Transport.Connection { - private final Transport.Connection proxyConnection; - private final DiscoveryNode targetNode; - - private ProxyConnection(Transport.Connection proxyConnection, DiscoveryNode targetNode) { - this.proxyConnection = proxyConnection; - this.targetNode = targetNode; - } - - @Override - public DiscoveryNode getNode() { - return targetNode; - } - - @Override - public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) - throws IOException, TransportException { - proxyConnection.sendRequest(requestId, TransportActionProxy.getProxyAction(action), - TransportActionProxy.wrapRequest(targetNode, request), options); - } - - @Override - public void close() { - assert false: "proxy connections must not be closed"; - } - - @Override - public void addCloseListener(ActionListener listener) { - proxyConnection.addCloseListener(listener); - } - - @Override - public boolean isClosed() { - return proxyConnection.isClosed(); - } - - @Override - public Version getVersion() { - return proxyConnection.getVersion(); - } - } - Transport.Connection getConnection() { - return connectionManager.getConnection(getAnyConnectedNode()); + return remoteConnectionManager.getAnyRemoteConnection(); } @Override public void close() throws IOException { - IOUtils.close(connectHandler); - connectionManager.closeNoBlock(); + IOUtils.close(connectHandler, remoteConnectionManager); } public boolean isClosed() { return connectHandler.isClosed(); } - public String getProxyAddress() { - return proxyAddress; - } - public 
List>> getSeedNodes() { return seedNodes; } @@ -456,14 +400,14 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos final ConnectionProfile profile = ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG); final StepListener openConnectionStep = new StepListener<>(); try { - connectionManager.openConnection(seedNode, profile, openConnectionStep); + remoteConnectionManager.openConnection(seedNode, profile, openConnectionStep); } catch (Exception e) { onFailure.accept(e); } final StepListener handShakeStep = new StepListener<>(); openConnectionStep.whenComplete(connection -> { - ConnectionProfile connectionProfile = connectionManager.getConnectionProfile(); + ConnectionProfile connectionProfile = remoteConnectionManager.getConnectionManager().getConnectionProfile(); transportService.handshake(connection, connectionProfile.getHandshakeTimeout().millis(), getRemoteClusterNamePredicate(), handShakeStep); }, onFailure); @@ -472,8 +416,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos handShakeStep.whenComplete(handshakeResponse -> { final DiscoveryNode handshakeNode = maybeAddProxyAddress(proxyAddress, handshakeResponse.getDiscoveryNode()); - if (nodePredicate.test(handshakeNode) && connectionManager.size() < maxNumRemoteConnections) { - connectionManager.connectToNode(handshakeNode, null, + if (nodePredicate.test(handshakeNode) && remoteConnectionManager.size() < maxNumRemoteConnections) { + remoteConnectionManager.connectToNode(handshakeNode, null, transportService.connectionValidator(handshakeNode), fullConnectionStep); } else { fullConnectionStep.onResponse(null); @@ -565,8 +509,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private void handleNodes(Iterator nodesIter) { while (nodesIter.hasNext()) { final DiscoveryNode node = maybeAddProxyAddress(proxyAddress, nodesIter.next()); - if (nodePredicate.test(node) && connectionManager.size() < maxNumRemoteConnections) { - connectionManager.connectToNode(node, null, + if (nodePredicate.test(node) && remoteConnectionManager.size() < maxNumRemoteConnections) { + remoteConnectionManager.connectToNode(node, null, transportService.connectionValidator(node), new ActionListener() { @Override public void onResponse(Void aVoid) { @@ -625,20 +569,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } boolean isNodeConnected(final DiscoveryNode node) { - return connectionManager.nodeConnected(node); - } - - private final AtomicLong nextNodeId = new AtomicLong(); - - DiscoveryNode getAnyConnectedNode() { - List nodes = new ArrayList<>(connectionManager.connectedNodes()); - if (nodes.isEmpty()) { - throw new NoSuchRemoteClusterException(clusterAlias); - } else { - long curr; - while ((curr = nextNodeId.incrementAndGet()) == Long.MIN_VALUE); - return nodes.get(Math.toIntExact(Math.floorMod(curr, nodes.size()))); - } + return remoteConnectionManager.getConnectionManager().nodeConnected(node); } /** @@ -655,7 +586,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } int getNumNodesConnected() { - return connectionManager.size(); + return remoteConnectionManager.size(); } private static ConnectionManager createConnectionManager(ConnectionProfile connectionProfile, TransportService transportService) { @@ -663,6 +594,6 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } ConnectionManager getConnectionManager() { - return 
connectionManager; + return remoteConnectionManager.getConnectionManager(); } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java new file mode 100644 index 00000000000..90340d841ba --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +public class RemoteConnectionManager implements Closeable { + + private final String clusterAlias; + private final ConnectionManager connectionManager; + private final AtomicLong counter = new AtomicLong(); + private volatile List connections = Collections.emptyList(); + + RemoteConnectionManager(String clusterAlias, ConnectionManager connectionManager) { + this.clusterAlias = clusterAlias; + this.connectionManager = connectionManager; + this.connectionManager.addListener(new TransportConnectionListener() { + @Override + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { + addConnection(connection); + } + + @Override + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { + removeConnection(connection); + } + }); + } + + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile, + ConnectionManager.ConnectionValidator connectionValidator, + ActionListener listener) throws ConnectTransportException { + connectionManager.connectToNode(node, connectionProfile, connectionValidator, listener); + } + + public void openConnection(DiscoveryNode node, ConnectionProfile profile, ActionListener listener) { + connectionManager.openConnection(node, profile, listener); + } + + public Transport.Connection getRemoteConnection(DiscoveryNode node) { + try { + return connectionManager.getConnection(node); + } catch (NodeNotConnectedException e) { + return new ProxyConnection(getAnyRemoteConnection(), node); + } + } + + public Transport.Connection getAnyRemoteConnection() { + List localConnections = this.connections; + if (localConnections.isEmpty()) { + throw new NoSuchRemoteClusterException(clusterAlias); + } else { + long curr; + while ((curr = counter.incrementAndGet()) == Long.MIN_VALUE); + return localConnections.get(Math.toIntExact(Math.floorMod(curr, (long) localConnections.size()))); + } + } + + public ConnectionManager getConnectionManager() { + 
return connectionManager; + } + + public int size() { + return connectionManager.size(); + } + + public void close() { + connectionManager.closeNoBlock(); + } + + private synchronized void addConnection(Transport.Connection addedConnection) { + ArrayList newConnections = new ArrayList<>(this.connections); + newConnections.add(addedConnection); + this.connections = Collections.unmodifiableList(newConnections); + } + + private synchronized void removeConnection(Transport.Connection removedConnection) { + int newSize = this.connections.size() - 1; + ArrayList newConnections = new ArrayList<>(newSize); + for (Transport.Connection connection : this.connections) { + if (connection.equals(removedConnection) == false) { + newConnections.add(connection); + } + } + assert newConnections.size() == newSize : "Expected connection count: " + newSize + ", Found: " + newConnections.size(); + this.connections = Collections.unmodifiableList(newConnections); + } + + static final class ProxyConnection implements Transport.Connection { + private final Transport.Connection connection; + private final DiscoveryNode targetNode; + + private ProxyConnection(Transport.Connection connection, DiscoveryNode targetNode) { + this.connection = connection; + this.targetNode = targetNode; + } + + @Override + public DiscoveryNode getNode() { + return targetNode; + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws IOException, TransportException { + connection.sendRequest(requestId, TransportActionProxy.getProxyAction(action), + TransportActionProxy.wrapRequest(targetNode, request), options); + } + + @Override + public void close() { + assert false: "proxy connections must not be closed"; + } + + @Override + public void addCloseListener(ActionListener listener) { + connection.addCloseListener(listener); + } + + @Override + public boolean isClosed() { + return connection.isClosed(); + } + + @Override + public Version getVersion() { + return connection.getVersion(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java index c41a328637c..f07f6b0417a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java @@ -43,10 +43,10 @@ public interface TransportConnectionListener { /** * Called once a node connection is opened and registered. */ - default void onNodeConnected(DiscoveryNode node) {} + default void onNodeConnected(DiscoveryNode node, Transport.Connection connection) {} /** * Called once a node connection is closed and unregistered. 
*/ - default void onNodeDisconnected(DiscoveryNode node) {} + default void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {} } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 60078486335..4054cc0355b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -492,7 +492,7 @@ public class TransportSearchActionTests extends ESTestCase { CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters); RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (disconnectedNodes.remove(node)) { disconnectedLatch.countDown(); } @@ -662,7 +662,7 @@ public class TransportSearchActionTests extends ESTestCase { CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters); RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (disconnectedNodes.remove(node)) { disconnectedLatch.countDown(); } diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index fd3dfe9e25b..0e4ae650dc4 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -166,7 +166,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { assert addr == null : "boundAddress: " + addr; return DiscoveryNode.createLocal(settings, buildNewFakeTransportAddress(), UUIDs.randomBase64UUID()); }, null, Collections.emptySet()); - transportService.addNodeConnectedBehavior(cm -> Collections.emptySet()); + transportService.addNodeConnectedBehavior((cm, dn) -> false); transportService.addGetConnectionBehavior((connectionManager, discoveryNode) -> { // The FailAndRetryTransport does not use the connection profile PlainActionFuture future = PlainActionFuture.newFuture(); diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java index b4ead893846..33397ae5802 100644 --- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes.Builder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.PeerFinder.TransportAddressConnector; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; @@ -215,9 +214,11 @@ public class PeerFinderTests extends ESTestCase { = new ConnectionManager(settings, capturingTransport); 
StubbableConnectionManager connectionManager = new StubbableConnectionManager(innerConnectionManager, settings, capturingTransport); - connectionManager.setDefaultNodeConnectedBehavior(cm -> { - assertTrue(Sets.haveEmptyIntersection(connectedNodes, disconnectedNodes)); - return connectedNodes; + connectionManager.setDefaultNodeConnectedBehavior((cm, discoveryNode) -> { + final boolean isConnected = connectedNodes.contains(discoveryNode); + final boolean isDisconnected = disconnectedNodes.contains(discoveryNode); + assert isConnected != isDisconnected : discoveryNode + ": isConnected=" + isConnected + ", isDisconnected=" + isDisconnected; + return isConnected; }); connectionManager.setDefaultGetConnectionBehavior((cm, discoveryNode) -> capturingTransport.createConnection(discoveryNode)); transportService = new TransportService(settings, capturingTransport, deterministicTaskQueue.getThreadPool(), diff --git a/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index 6b4eb4e2577..610cf4c0221 100644 --- a/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -102,12 +102,12 @@ public class ZenFaultDetectionTests extends ESTestCase { final CountDownLatch latch = new CountDownLatch(2); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java index 3b276f23701..0d23cd1d43b 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportResponse; @@ -193,12 +194,12 @@ public class PublishClusterStateActionTests extends ESTestCase { final CountDownLatch latch = new CountDownLatch(nodes.size() * 2); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; diff --git a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java index 
c31e6d3e7d9..f34d39ae725 100644 --- a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java @@ -77,12 +77,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -204,12 +204,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -244,12 +244,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -293,7 +293,6 @@ public class ConnectionManagerTests extends ESTestCase { @Override public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) throws TransportException { - } } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 3d347512160..2d0097b43a7 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -87,7 +87,7 @@ public class RemoteClusterClientTests extends ESTestCase { service.getRemoteClusterService().getConnections().forEach(con -> { con.getConnectionManager().addListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (remoteNode.equals(node)) { semaphore.release(); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 2488551f7d6..5cb00cbdc9d 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -101,6 +101,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.sameInstance; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.Mockito.mock; public class RemoteClusterConnectionTests extends ESTestCase { @@ -362,14 +363,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch latchConnected = new CountDownLatch(1); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (node.equals(discoverableNode)) { latchDisconnect.countDown(); } } @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { if (node.equals(spareNode)) { latchConnected.countDown(); } @@ -498,7 +499,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { ConnectionManager delegate = new ConnectionManager(Settings.EMPTY, service.transport); StubbableConnectionManager connectionManager = new StubbableConnectionManager(delegate, Settings.EMPTY, service.transport); - connectionManager.addConnectBehavior(seedNode.getAddress(), (cm, discoveryNode) -> { + connectionManager.addGetConnectionBehavior(seedNode.getAddress(), (cm, discoveryNode) -> { if (discoveryNode == seedNode) { return seedConnection; } @@ -1003,8 +1004,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { barrier.await(); for (int j = 0; j < numGetCalls; j++) { try { - DiscoveryNode node = connection.getAnyConnectedNode(); - assertNotNull(node); + Transport.Connection lowLevelConnection = connection.getConnection(); + assertNotNull(lowLevelConnection); } catch (NoSuchRemoteClusterException e) { // ignore, this is an expected exception } @@ -1034,7 +1035,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { ActionListener.map(fut, x -> null))); } else { DiscoveryNode node = randomFrom(discoverableNodes).v2().get(); - connection.onNodeDisconnected(node); + connection.onNodeDisconnected(node, mock(Transport.Connection.class)); } } } catch (Exception ex) { @@ -1142,14 +1143,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { ConnectionManager delegate = new ConnectionManager(Settings.EMPTY, service.transport); StubbableConnectionManager connectionManager = new StubbableConnectionManager(delegate, Settings.EMPTY, service.transport); - connectionManager.setDefaultNodeConnectedBehavior(cm -> Collections.singleton(connectedNode)); + connectionManager.setDefaultNodeConnectedBehavior((cm, node) -> connectedNode.equals(node)); - connectionManager.addConnectBehavior(connectedNode.getAddress(), (cm, discoveryNode) -> { - if (discoveryNode == connectedNode) { - return seedConnection; - } - return cm.getConnection(discoveryNode); + connectionManager.addGetConnectionBehavior(connectedNode.getAddress(), (cm, discoveryNode) -> seedConnection); + + connectionManager.addGetConnectionBehavior(disconnectedNode.getAddress(), (cm, discoveryNode) -> { + throw new NodeNotConnectedException(discoveryNode, ""); }); + service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", @@ -1163,13 +1164,13 @@ public class RemoteClusterConnectionTests extends ESTestCase { for (int i = 0; i < 10; i++) { // we don't use the transport service connection manager so we will get a proxy connection for the local node Transport.Connection remoteConnection = 
connection.getConnection(service.getLocalNode()); - assertThat(remoteConnection, instanceOf(RemoteClusterConnection.ProxyConnection.class)); + assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), equalTo(service.getLocalNode())); } for (int i = 0; i < 10; i++) { //always a proxy connection as the target node is not connected Transport.Connection remoteConnection = connection.getConnection(disconnectedNode); - assertThat(remoteConnection, instanceOf(RemoteClusterConnection.ProxyConnection.class)); + assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), sameInstance(disconnectedNode)); } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java new file mode 100644 index 00000000000..e2d33a6263a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.ESTestCase; + +import java.net.InetAddress; +import java.util.HashSet; +import java.util.Set; + +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class RemoteConnectionManagerTests extends ESTestCase { + + private Transport transport; + private RemoteConnectionManager remoteConnectionManager; + private ConnectionManager.ConnectionValidator validator = (connection, profile, listener) -> listener.onResponse(null); + + @Override + public void setUp() throws Exception { + super.setUp(); + transport = mock(Transport.class); + remoteConnectionManager = new RemoteConnectionManager("remote-cluster", new ConnectionManager(Settings.EMPTY, transport)); + } + + @SuppressWarnings("unchecked") + public void testGetConnection() { + TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), 1000); + + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new TestRemoteConnection((DiscoveryNode) invocationOnMock.getArguments()[0])); + return null; + }).when(transport).openConnection(any(DiscoveryNode.class), any(ConnectionProfile.class), any(ActionListener.class)); + + DiscoveryNode node1 = new DiscoveryNode("node-1", address, Version.CURRENT); + PlainActionFuture future1 = PlainActionFuture.newFuture(); + remoteConnectionManager.connectToNode(node1, null, validator, future1); + assertTrue(future1.isDone()); + + // Add duplicate connect attempt to ensure that we do not get duplicate connections in the round robin + remoteConnectionManager.connectToNode(node1, null, validator, PlainActionFuture.newFuture()); + + DiscoveryNode node2 = new DiscoveryNode("node-2", address, Version.CURRENT.minimumCompatibilityVersion()); + PlainActionFuture future2 = PlainActionFuture.newFuture(); + remoteConnectionManager.connectToNode(node2, null, validator, future2); + assertTrue(future2.isDone()); + + assertEquals(node1, remoteConnectionManager.getRemoteConnection(node1).getNode()); + assertEquals(node2, remoteConnectionManager.getRemoteConnection(node2).getNode()); + + DiscoveryNode node4 = new DiscoveryNode("node-4", address, Version.CURRENT); + assertThat(remoteConnectionManager.getRemoteConnection(node4), instanceOf(RemoteConnectionManager.ProxyConnection.class)); + + // Test round robin + Set versions = new HashSet<>(); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + + assertThat(versions, hasItems(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion())); + + // Test that the connection is cleared from the round robin list when it is closed + remoteConnectionManager.getRemoteConnection(node1).close(); + + versions.clear(); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + + assertThat(versions, 
hasItems(Version.CURRENT.minimumCompatibilityVersion())); + assertEquals(1, versions.size()); + } + + private static class TestRemoteConnection extends CloseableConnection { + + private final DiscoveryNode node; + + private TestRemoteConnection(DiscoveryNode node) { + this.node = node; + } + + @Override + public DiscoveryNode getNode() { + return node; + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws TransportException { + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 015c5fb736d..f796b3037a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -81,7 +81,7 @@ public class MockTransport implements Transport, LifecycleComponent { @Nullable ClusterSettings clusterSettings, Set taskHeaders) { StubbableConnectionManager connectionManager = new StubbableConnectionManager(new ConnectionManager(settings, this), settings, this); - connectionManager.setDefaultNodeConnectedBehavior(cm -> Collections.emptySet()); + connectionManager.setDefaultNodeConnectedBehavior((cm, node) -> false); connectionManager.setDefaultGetConnectionBehavior((cm, discoveryNode) -> createConnection(discoveryNode)); return new TransportService(settings, this, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders, connectionManager); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 3ecb76b0dee..cb509dfbf9d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -463,7 +463,7 @@ public final class MockTransportService extends TransportService { * @return {@code true} if no other get connection behavior was registered for this address before. 
*/ public boolean addGetConnectionBehavior(TransportAddress transportAddress, StubbableConnectionManager.GetConnectionBehavior behavior) { - return connectionManager().addConnectBehavior(transportAddress, behavior); + return connectionManager().addGetConnectionBehavior(transportAddress, behavior); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java index 8f07bc19d0b..ea6e145d882 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java @@ -28,7 +28,6 @@ import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -37,7 +36,7 @@ public class StubbableConnectionManager extends ConnectionManager { private final ConnectionManager delegate; private final ConcurrentMap getConnectionBehaviors; private volatile GetConnectionBehavior defaultGetConnectionBehavior = ConnectionManager::getConnection; - private volatile NodeConnectedBehavior defaultNodeConnectedBehavior = ConnectionManager::connectedNodes; + private volatile NodeConnectedBehavior defaultNodeConnectedBehavior = ConnectionManager::nodeConnected; public StubbableConnectionManager(ConnectionManager delegate, Settings settings, Transport transport) { super(settings, transport); @@ -45,7 +44,7 @@ public class StubbableConnectionManager extends ConnectionManager { this.getConnectionBehaviors = new ConcurrentHashMap<>(); } - public boolean addConnectBehavior(TransportAddress transportAddress, GetConnectionBehavior connectBehavior) { + public boolean addGetConnectionBehavior(TransportAddress transportAddress, GetConnectionBehavior connectBehavior) { return getConnectionBehaviors.put(transportAddress, connectBehavior) == null; } @@ -64,7 +63,6 @@ public class StubbableConnectionManager extends ConnectionManager { public void clearBehaviors() { defaultGetConnectionBehavior = ConnectionManager::getConnection; getConnectionBehaviors.clear(); - defaultNodeConnectedBehavior = ConnectionManager::connectedNodes; } public void clearBehavior(TransportAddress transportAddress) { @@ -85,12 +83,7 @@ public class StubbableConnectionManager extends ConnectionManager { @Override public boolean nodeConnected(DiscoveryNode node) { - return defaultNodeConnectedBehavior.connectedNodes(delegate).contains(node); - } - - @Override - public Set connectedNodes() { - return defaultNodeConnectedBehavior.connectedNodes(delegate); + return defaultNodeConnectedBehavior.connectedNodes(delegate, node); } @Override @@ -132,6 +125,6 @@ public class StubbableConnectionManager extends ConnectionManager { @FunctionalInterface public interface NodeConnectedBehavior { - Set connectedNodes(ConnectionManager connectionManager); + boolean connectedNodes(ConnectionManager connectionManager, DiscoveryNode node); } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index d755a3a1642..ba5563267c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -163,12 +163,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(2); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; @@ -689,12 +689,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(1); TransportConnectionListener disconnectListener = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { fail("node connected should not be called, all connection have been done previously, node: " + node); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } }; @@ -1731,12 +1731,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(4); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; From 7ac647c365c883e8f1a5b952893e68b8ba3b8412 Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Wed, 25 Sep 2019 16:15:10 -0600 Subject: [PATCH 70/94] Add support for POST requests to SLM Execute API (#47061) This commit adds support for POST requests to the SLM `_execute` API: executing a policy is not idempotent, so POST is a more appropriate HTTP verb than PUT, which is conventionally reserved for idempotent requests. The docs are also changed to favor POST over PUT, although PUT is not removed or officially deprecated. 
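For example, a snapshot policy can now be triggered with POST (shown against the example
`daily-snapshots` policy used in the docs below; PUT continues to work):

    POST /_slm/policy/daily-snapshots/_execute

or, through the low-level REST client, as exercised by the updated integration test:

    client().performRequest(new Request("POST", "/_slm/policy/daily-snapshots/_execute"));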
--- .../elasticsearch/client/IndexLifecycleRequestConverters.java | 2 +- docs/reference/ilm/apis/slm-api.asciidoc | 4 ++-- docs/reference/ilm/getting-started-slm.asciidoc | 2 +- .../org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java | 4 ++-- .../xpack/slm/action/RestExecuteSnapshotLifecycleAction.java | 1 + 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java index fb5db72cbc9..5589d6367fd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java @@ -204,7 +204,7 @@ final class IndexLifecycleRequestConverters { } static Request executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest executeSnapshotLifecyclePolicyRequest) { - Request request = new Request(HttpPut.METHOD_NAME, + Request request = new Request(HttpPost.METHOD_NAME, new RequestConverters.EndpointBuilder() .addPathPartAsIs("_slm/policy") .addPathPartAsIs(executeSnapshotLifecyclePolicyRequest.getPolicyId()) diff --git a/docs/reference/ilm/apis/slm-api.asciidoc b/docs/reference/ilm/apis/slm-api.asciidoc index cd9d2364e69..013225f9696 100644 --- a/docs/reference/ilm/apis/slm-api.asciidoc +++ b/docs/reference/ilm/apis/slm-api.asciidoc @@ -185,7 +185,7 @@ To take an immediate snapshot using a policy, use the following [source,console] -------------------------------------------------- -PUT /_slm/policy/daily-snapshots/_execute +POST /_slm/policy/daily-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't easily handle snapshots from docs tests] @@ -279,7 +279,7 @@ Another snapshot can immediately be executed to ensure the new policy works: [source,console] -------------------------------------------------- -PUT /_slm/policy/daily-snapshots/_execute +POST /_slm/policy/daily-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't handle snapshots in docs tests] diff --git a/docs/reference/ilm/getting-started-slm.asciidoc b/docs/reference/ilm/getting-started-slm.asciidoc index 32a5c5ef4d8..e6f7dff2749 100644 --- a/docs/reference/ilm/getting-started-slm.asciidoc +++ b/docs/reference/ilm/getting-started-slm.asciidoc @@ -132,7 +132,7 @@ as using the configuration from our policy right now instead of waiting for [source,console] -------------------------------------------------- -PUT /_slm/policy/nightly-snapshots/_execute +POST /_slm/policy/nightly-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't easily handle snapshots from docs tests] diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java index 2383e072272..cfe5b2f0e06 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java @@ -207,7 +207,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { createSnapshotPolicy(policyName, "snap", "1 2 3 4 5 ?", repoId, indexName, true); ResponseException badResp = expectThrows(ResponseException.class, - () -> 
client().performRequest(new Request("PUT", "/_slm/policy/" + policyName + "-bad/_execute"))); + () -> client().performRequest(new Request("POST", "/_slm/policy/" + policyName + "-bad/_execute"))); assertThat(EntityUtils.toString(badResp.getResponse().getEntity()), containsString("no such snapshot lifecycle policy [" + policyName + "-bad]")); @@ -335,7 +335,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { */ private String executePolicy(String policyId) { try { - Response executeRepsonse = client().performRequest(new Request("PUT", "/_slm/policy/" + policyId + "/_execute")); + Response executeRepsonse = client().performRequest(new Request("POST", "/_slm/policy/" + policyId + "/_execute")); try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, EntityUtils.toByteArray(executeRepsonse.getEntity()))) { return parser.mapStrings().get("snapshot_name"); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java index a644df789d0..72eceee0f8e 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java @@ -17,6 +17,7 @@ public class RestExecuteSnapshotLifecycleAction extends BaseRestHandler { public RestExecuteSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_slm/policy/{name}/_execute", this); + controller.registerHandler(RestRequest.Method.POST, "/_slm/policy/{name}/_execute", this); } @Override From fcddaa90dea6baecce4889014896934218ebf1b7 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 25 Sep 2019 19:11:15 -0400 Subject: [PATCH 71/94] [7.x] [ML][Inference] adding tree model (#47044) (#47141) * [ML][Inference] adding tree model (#47044) * [ML][Inference] adding tree model * renaming features for updated schema * fixing 7.x compilation --- .../MlInferenceNamedXContentProvider.java | 6 + .../inference/trainedmodel/TrainedModel.java | 36 ++ .../ml/inference/trainedmodel/tree/Tree.java | 192 ++++++++++ .../inference/trainedmodel/tree/TreeNode.java | 280 ++++++++++++++ .../client/RestHighLevelClientTests.java | 7 +- .../trainedmodel/tree/TreeNodeTests.java | 72 ++++ .../trainedmodel/tree/TreeTests.java | 88 +++++ .../xpack/core/XPackClientPlugin.java | 6 +- .../MlInferenceNamedXContentProvider.java | 13 + .../LenientlyParsedTrainedModel.java | 9 + .../StrictlyParsedTrainedModel.java | 9 + .../inference/trainedmodel/TrainedModel.java | 44 +++ .../ml/inference/trainedmodel/tree/Tree.java | 311 ++++++++++++++++ .../inference/trainedmodel/tree/TreeNode.java | 346 ++++++++++++++++++ .../inference/NamedXContentObjectsTests.java | 24 +- .../trainedmodel/tree/TreeNodeTests.java | 100 +++++ .../trainedmodel/tree/TreeTests.java | 172 +++++++++ 17 files changed, 1711 insertions(+), 4 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java create mode 100644 
client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNodeTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java index 867c598da90..be7c3c00af2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.client.ml.inference; +import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; @@ -42,6 +44,10 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider { TargetMeanEncoding::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), FrequencyEncoding::fromXContent)); + + // Model + namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent)); + return namedXContent; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java new file mode 100644 index 00000000000..fb1f5c3b4ab --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.trainedmodel; + +import org.elasticsearch.common.xcontent.ToXContentObject; + +import java.util.List; + +public interface TrainedModel extends ToXContentObject { + + /** + * @return List of featureNames expected by the model. In the order that they are expected + */ + List getFeatureNames(); + + /** + * @return The name of the model + */ + String getName(); +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java new file mode 100644 index 00000000000..de040ec6f9e --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java @@ -0,0 +1,192 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class Tree implements TrainedModel { + + public static final String NAME = "tree"; + + public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); + public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); + + static { + PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES); + PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE); + } + + public static Tree fromXContent(XContentParser parser) { + return PARSER.apply(parser, null).build(); + } + + private final List featureNames; + private final List nodes; + + Tree(List featureNames, List nodes) { + this.featureNames = Collections.unmodifiableList(Objects.requireNonNull(featureNames)); + this.nodes = Collections.unmodifiableList(Objects.requireNonNull(nodes)); + } + + @Override + public String getName() { + return NAME; + } + + @Override + public List getFeatureNames() { + return featureNames; + } + + public List getNodes() { + return nodes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FEATURE_NAMES.getPreferredName(), featureNames); + builder.field(TREE_STRUCTURE.getPreferredName(), nodes); + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Tree that = (Tree) o; + return Objects.equals(featureNames, that.featureNames) + && Objects.equals(nodes, that.nodes); + } + + @Override + public int hashCode() { + return Objects.hash(featureNames, nodes); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private List featureNames; + private ArrayList nodes; + private int numNodes; + + public Builder() { + nodes = new ArrayList<>(); + // allocate space in the root node and set to a leaf + nodes.add(null); + addLeaf(0, 0.0); + numNodes = 1; + } + + public Builder setFeatureNames(List featureNames) { + this.featureNames = featureNames; + return this; + } + + public Builder addNode(TreeNode.Builder node) { + nodes.add(node); + return this; + } + + public Builder setNodes(List nodes) { + this.nodes = new ArrayList<>(nodes); + return this; + } + + public Builder setNodes(TreeNode.Builder... nodes) { + return setNodes(Arrays.asList(nodes)); + } + + /** + * Add a decision node. Space for the child nodes is allocated + * @param nodeIndex Where to place the node. 
This is either 0 (root) or an existing child node index + * @param featureIndex The feature index the decision is made on + * @param isDefaultLeft Default left branch if the feature is missing + * @param decisionThreshold The decision threshold + * @return The created node + */ + public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { + int leftChild = numNodes++; + int rightChild = numNodes++; + nodes.ensureCapacity(nodeIndex + 1); + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + + TreeNode.Builder node = TreeNode.builder(nodeIndex) + .setDefaultLeft(isDefaultLeft) + .setLeftChild(leftChild) + .setRightChild(rightChild) + .setSplitFeature(featureIndex) + .setThreshold(decisionThreshold); + nodes.set(nodeIndex, node); + + // allocate space for the child nodes + while (nodes.size() <= rightChild) { + nodes.add(null); + } + + return node; + } + + /** + * Sets the node at {@code nodeIndex} to a leaf node. + * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} + * @param value The prediction value + * @return this + */ + public Builder addLeaf(int nodeIndex, double value) { + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value)); + return this; + } + + public Tree build() { + return new Tree(featureNames, + nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList())); + } + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java new file mode 100644 index 00000000000..020aaa09716 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.client.ml.job.config.Operator; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class TreeNode implements ToXContentObject { + + public static final String NAME = "tree_node"; + + public static final ParseField DECISION_TYPE = new ParseField("decision_type"); + public static final ParseField THRESHOLD = new ParseField("threshold"); + public static final ParseField LEFT_CHILD = new ParseField("left_child"); + public static final ParseField RIGHT_CHILD = new ParseField("right_child"); + public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); + public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); + public static final ParseField NODE_INDEX = new ParseField("node_index"); + public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); + public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); + + + private static final ObjectParser PARSER = new ObjectParser<>( + NAME, + true, + Builder::new); + static { + PARSER.declareDouble(Builder::setThreshold, THRESHOLD); + PARSER.declareField(Builder::setOperator, + p -> Operator.fromString(p.text()), + DECISION_TYPE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD); + PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD); + PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT); + PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE); + PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX); + PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN); + PARSER.declareDouble(Builder::setLeafValue, LEAF_VALUE); + } + + public static Builder fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final Operator operator; + private final Double threshold; + private final Integer splitFeature; + private final int nodeIndex; + private final Double splitGain; + private final Double leafValue; + private final Boolean defaultLeft; + private final Integer leftChild; + private final Integer rightChild; + + + TreeNode(Operator operator, + Double threshold, + Integer splitFeature, + int nodeIndex, + Double splitGain, + Double leafValue, + Boolean defaultLeft, + Integer leftChild, + Integer rightChild) { + this.operator = operator; + this.threshold = threshold; + this.splitFeature = splitFeature; + this.nodeIndex = nodeIndex; + this.splitGain = splitGain; + this.leafValue = leafValue; + this.defaultLeft = defaultLeft; + this.leftChild = leftChild; + this.rightChild = rightChild; + } + + public Operator getOperator() { + return operator; + } + + public Double getThreshold() { + return threshold; + } + + public Integer getSplitFeature() { + return splitFeature; + } + + public Integer getNodeIndex() { + return nodeIndex; + } + + public Double getSplitGain() { + return splitGain; + } + + public Double getLeafValue() { + return leafValue; + } + + public Boolean isDefaultLeft() { + return defaultLeft; + } + + public Integer getLeftChild() { + return leftChild; + } + + public Integer getRightChild() { + return rightChild; + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + addOptionalField(builder, DECISION_TYPE, operator); + addOptionalField(builder, THRESHOLD, threshold); + addOptionalField(builder, SPLIT_FEATURE, splitFeature); + addOptionalField(builder, SPLIT_GAIN, splitGain); + addOptionalField(builder, NODE_INDEX, nodeIndex); + addOptionalField(builder, LEAF_VALUE, leafValue); + addOptionalField(builder, DEFAULT_LEFT, defaultLeft); + addOptionalField(builder, LEFT_CHILD, leftChild); + addOptionalField(builder, RIGHT_CHILD, rightChild); + builder.endObject(); + return builder; + } + + private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { + if (value != null) { + builder.field(field.getPreferredName(), value); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TreeNode that = (TreeNode) o; + return Objects.equals(operator, that.operator) + && Objects.equals(threshold, that.threshold) + && Objects.equals(splitFeature, that.splitFeature) + && Objects.equals(nodeIndex, that.nodeIndex) + && Objects.equals(splitGain, that.splitGain) + && Objects.equals(leafValue, that.leafValue) + && Objects.equals(defaultLeft, that.defaultLeft) + && Objects.equals(leftChild, that.leftChild) + && Objects.equals(rightChild, that.rightChild); + } + + @Override + public int hashCode() { + return Objects.hash(operator, + threshold, + splitFeature, + splitGain, + nodeIndex, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public static Builder builder(int nodeIndex) { + return new Builder(nodeIndex); + } + + public static class Builder { + private Operator operator; + private Double threshold; + private Integer splitFeature; + private int nodeIndex; + private Double splitGain; + private Double leafValue; + private Boolean defaultLeft; + private Integer leftChild; + private Integer rightChild; + + public Builder(int nodeIndex) { + this.nodeIndex = nodeIndex; + } + + private Builder() { + } + + public Builder setOperator(Operator operator) { + this.operator = operator; + return this; + } + + public Builder setThreshold(Double threshold) { + this.threshold = threshold; + return this; + } + + public Builder setSplitFeature(Integer splitFeature) { + this.splitFeature = splitFeature; + return this; + } + + public Builder setNodeIndex(int nodeIndex) { + this.nodeIndex = nodeIndex; + return this; + } + + public Builder setSplitGain(Double splitGain) { + this.splitGain = splitGain; + return this; + } + + public Builder setLeafValue(Double leafValue) { + this.leafValue = leafValue; + return this; + } + + public Builder setDefaultLeft(Boolean defaultLeft) { + this.defaultLeft = defaultLeft; + return this; + } + + public Builder setLeftChild(Integer leftChild) { + this.leftChild = leftChild; + return this; + } + + public Integer getLeftChild() { + return leftChild; + } + + public Builder setRightChild(Integer rightChild) { + this.rightChild = rightChild; + return this; + } + + public Integer getRightChild() { + return rightChild; + } + + public TreeNode build() { + return new TreeNode(operator, + threshold, + splitFeature, + nodeIndex, + splitGain, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 15929bbaf21..44335f1b1a0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -65,6 +65,7 @@ import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.Binar import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric; +import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; @@ -680,7 +681,7 @@ public class RestHighLevelClientTests extends ESTestCase { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(40, namedXContents.size()); + assertEquals(41, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -690,7 +691,7 @@ public class RestHighLevelClientTests extends ESTestCase { categories.put(namedXContent.categoryClass, counter + 1); } } - assertEquals("Had: " + categories, 10, categories.size()); + assertEquals("Had: " + categories, 11, categories.size()); assertEquals(Integer.valueOf(3), categories.get(Aggregation.class)); assertTrue(names.contains(ChildrenAggregationBuilder.NAME)); assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME)); @@ -739,6 +740,8 @@ public class RestHighLevelClientTests extends ESTestCase { RSquaredMetric.NAME)); assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class)); assertThat(names, hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME)); + assertEquals(Integer.valueOf(1), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class)); + assertThat(names, hasItems(Tree.NAME)); } public void testApiNamingConventions() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java new file mode 100644 index 00000000000..733a9ddc3d9 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.inference.trainedmodel.tree;
+
+import org.elasticsearch.client.ml.job.config.Operator;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class TreeNodeTests extends AbstractXContentTestCase<TreeNode> {
+
+    @Override
+    protected TreeNode doParseInstance(XContentParser parser) throws IOException {
+        return TreeNode.fromXContent(parser).build();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected TreeNode createTestInstance() {
+        Integer lft = randomBoolean() ? null : randomInt(100);
+        Integer rgt = randomBoolean() ? randomInt(100) : null;
+        Double threshold = lft != null || randomBoolean() ? randomDouble() : null;
+        Integer featureIndex = lft != null || randomBoolean() ? randomInt(100) : null;
+        return createRandom(randomInt(), lft, rgt, threshold, featureIndex,
+            randomBoolean() ? null : randomFrom(Operator.values())).build();
+    }
+
+    public static TreeNode createRandomLeafNode(double internalValue) {
+        return TreeNode.builder(randomInt(100))
+            .setDefaultLeft(randomBoolean() ? null : randomBoolean())
+            .setLeafValue(internalValue)
+            .build();
+    }
+
+    public static TreeNode.Builder createRandom(int nodeIndex,
+                                                Integer left,
+                                                Integer right,
+                                                Double threshold,
+                                                Integer featureIndex,
+                                                Operator operator) {
+        return TreeNode.builder(nodeIndex)
+            .setLeafValue(left == null ? randomDouble() : null)
+            .setDefaultLeft(randomBoolean() ? null : randomBoolean())
+            .setLeftChild(left)
+            .setRightChild(right)
+            .setThreshold(threshold)
+            .setOperator(operator)
+            .setSplitFeature(featureIndex)
+            .setSplitGain(randomBoolean() ? null : randomDouble());
+    }
+
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java
new file mode 100644
index 00000000000..5fe2286c360
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Predicate; + + +public class TreeTests extends AbstractXContentTestCase { + + @Override + protected Tree doParseInstance(XContentParser parser) throws IOException { + return Tree.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.startsWith("feature_names"); + } + + @Override + protected Tree createTestInstance() { + return createRandom(); + } + + public static Tree createRandom() { + return buildRandomTree(randomIntBetween(2, 15), 6); + } + + public static Tree buildRandomTree(int numFeatures, int depth) { + + Tree.Builder builder = Tree.builder(); + List featureNames = new ArrayList<>(numFeatures); + for(int i = 0; i < numFeatures; i++) { + featureNames.add(randomAlphaOfLength(10)); + } + builder.setFeatureNames(featureNames); + + TreeNode.Builder node = builder.addJunction(0, randomInt(numFeatures), true, randomDouble()); + List childNodes = Arrays.asList(node.getLeftChild(), node.getRightChild()); + + for (int i = 0; i < depth -1; i++) { + + List nextNodes = new ArrayList<>(); + for (int nodeId : childNodes) { + if (i == depth -2) { + builder.addLeaf(nodeId, randomDouble()); + } else { + TreeNode.Builder childNode = + builder.addJunction(nodeId, randomInt(numFeatures), true, randomDouble()); + nextNodes.add(childNode.getLeftChild()); + nextNodes.add(childNode.getRightChild()); + } + } + childNodes = nextNodes; + } + + return builder.build(); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 5570e82f10c..6c9342cc710 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -145,6 +145,8 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.P import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.Recall; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.ScoreByThresholdResult; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.SoftClassificationMetric; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree; import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; @@ -476,10 +478,12 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ScoreByThresholdResult.NAME, ScoreByThresholdResult::new), new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ConfusionMatrix.NAME.getPreferredName(), ConfusionMatrix.Result::new), - // ML - Inference + // ML - Inference preprocessing new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), 
FrequencyEncoding::new), new NamedWriteableRegistry.Entry(PreProcessor.class, OneHotEncoding.NAME.getPreferredName(), OneHotEncoding::new), new NamedWriteableRegistry.Entry(PreProcessor.class, TargetMeanEncoding.NAME.getPreferredName(), TargetMeanEncoding::new), + // ML - Inference models + new NamedWriteableRegistry.Entry(TrainedModel.class, Tree.NAME.getPreferredName(), Tree::new), // monitoring new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MONITORING, MonitoringFeatureSetUsage::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java index d7da457b64c..7f14077a150 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -8,6 +8,10 @@ package org.elasticsearch.xpack.core.ml.inference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.plugins.spi.NamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree; import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; @@ -40,6 +44,12 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider { namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, FrequencyEncoding.NAME, FrequencyEncoding::fromXContentStrict)); + // Model Lenient + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedTrainedModel.class, Tree.NAME, Tree::fromXContentLenient)); + + // Model Strict + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedTrainedModel.class, Tree.NAME, Tree::fromXContentStrict)); + return namedXContent; } @@ -54,6 +64,9 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider { namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), FrequencyEncoding::new)); + // Model + namedWriteables.add(new NamedWriteableRegistry.Entry(TrainedModel.class, Tree.NAME.getPreferredName(), Tree::new)); + return namedWriteables; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java new file mode 100644 index 00000000000..208e07de17b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+public interface LenientlyParsedTrainedModel extends TrainedModel {
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java
new file mode 100644
index 00000000000..48b38c16194
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java
@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+public interface StrictlyParsedTrainedModel extends TrainedModel {
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
new file mode 100644
index 00000000000..1d68e3d6d3f
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject;
+
+import java.util.List;
+import java.util.Map;
+
+public interface TrainedModel extends NamedXContentObject, NamedWriteable {
+
+    /**
+     * @return The list of feature names expected by the model, in the order they are expected
+     */
+    List<String> getFeatureNames();
+
+    /**
+     * Infer against the provided fields
+     *
+     * @param fields The fields and their values to infer against
+     * @return The predicted value. For classification this will be discrete values (e.g. 0.0 or 1.0).
+     *         For regression this is continuous.
+     */
+    double infer(Map<String, Object> fields);
+
+    /**
+     * @return {@code true} if the model is classification, {@code false} otherwise.
+     */
+    boolean isClassification();
+
+    /**
+     * This gathers the probabilities for each potential classification value.
+     *
+     * It should only be called if the implementing model infers classification values, not regression.
+     * @param fields The fields and their values to infer against
+     * @return The probabilities of each classification value
+     */
+    List<Double> inferProbabilities(Map<String, Object> fields);
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java
new file mode 100644
index 00000000000..8e48fa488a0
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java
@@ -0,0 +1,311 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Queue; +import java.util.Set; +import java.util.stream.Collectors; + +public class Tree implements LenientlyParsedTrainedModel, StrictlyParsedTrainedModel { + + public static final ParseField NAME = new ParseField("tree"); + + public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); + public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); + + private static final ObjectParser LENIENT_PARSER = createParser(true); + private static final ObjectParser STRICT_PARSER = createParser(false); + + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( + NAME.getPreferredName(), + lenient, + Tree.Builder::new); + parser.declareStringArray(Tree.Builder::setFeatureNames, FEATURE_NAMES); + parser.declareObjectArray(Tree.Builder::setNodes, (p, c) -> TreeNode.fromXContent(p, lenient), TREE_STRUCTURE); + return parser; + } + + public static Tree fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null).build(); + } + + public static Tree fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null).build(); + } + + private final List featureNames; + private final List nodes; + + Tree(List featureNames, List nodes) { + this.featureNames = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(featureNames, FEATURE_NAMES)); + this.nodes = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(nodes, TREE_STRUCTURE)); + } + + public Tree(StreamInput in) throws IOException { + this.featureNames = Collections.unmodifiableList(in.readStringList()); + this.nodes = Collections.unmodifiableList(in.readList(TreeNode::new)); + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public List getFeatureNames() { + return featureNames; + } + + public List getNodes() { + return nodes; + } + + @Override + public double infer(Map fields) { + List features = featureNames.stream().map(f -> (Double) fields.get(f)).collect(Collectors.toList()); + return infer(features); + } + + private double infer(List features) { + TreeNode node = nodes.get(0); + while(node.isLeaf() == false) { + node = nodes.get(node.compare(features)); + } + return node.getLeafValue(); + } + + /** + * Trace the route predicting on the feature vector takes. 
+ * @param features The feature vector + * @return The list of traversed nodes ordered from root to leaf + */ + public List trace(List features) { + List visited = new ArrayList<>(); + TreeNode node = nodes.get(0); + visited.add(node); + while(node.isLeaf() == false) { + node = nodes.get(node.compare(features)); + visited.add(node); + } + return visited; + } + + @Override + public boolean isClassification() { + return false; + } + + @Override + public List inferProbabilities(Map fields) { + throw new UnsupportedOperationException("Cannot infer probabilities against a regression model."); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(featureNames); + out.writeCollection(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FEATURE_NAMES.getPreferredName(), featureNames); + builder.field(TREE_STRUCTURE.getPreferredName(), nodes); + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Tree that = (Tree) o; + return Objects.equals(featureNames, that.featureNames) + && Objects.equals(nodes, that.nodes); + } + + @Override + public int hashCode() { + return Objects.hash(featureNames, nodes); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private List featureNames; + private ArrayList nodes; + private int numNodes; + + public Builder() { + nodes = new ArrayList<>(); + // allocate space in the root node and set to a leaf + nodes.add(null); + addLeaf(0, 0.0); + numNodes = 1; + } + + public Builder setFeatureNames(List featureNames) { + this.featureNames = featureNames; + return this; + } + + public Builder addNode(TreeNode.Builder node) { + nodes.add(node); + return this; + } + + public Builder setNodes(List nodes) { + this.nodes = new ArrayList<>(nodes); + return this; + } + + public Builder setNodes(TreeNode.Builder... nodes) { + return setNodes(Arrays.asList(nodes)); + } + + /** + * Add a decision node. Space for the child nodes is allocated + * @param nodeIndex Where to place the node. 
This is either 0 (root) or an existing child node index + * @param featureIndex The feature index the decision is made on + * @param isDefaultLeft Default left branch if the feature is missing + * @param decisionThreshold The decision threshold + * @return The created node + */ + TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { + int leftChild = numNodes++; + int rightChild = numNodes++; + nodes.ensureCapacity(nodeIndex + 1); + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + + TreeNode.Builder node = TreeNode.builder(nodeIndex) + .setDefaultLeft(isDefaultLeft) + .setLeftChild(leftChild) + .setRightChild(rightChild) + .setSplitFeature(featureIndex) + .setThreshold(decisionThreshold); + nodes.set(nodeIndex, node); + + // allocate space for the child nodes + while (nodes.size() <= rightChild) { + nodes.add(null); + } + + return node; + } + + void detectCycle(List nodes) { + if (nodes.isEmpty()) { + return; + } + Set visited = new HashSet<>(); + Queue toVisit = new ArrayDeque<>(nodes.size()); + toVisit.add(0); + while(toVisit.isEmpty() == false) { + Integer nodeIdx = toVisit.remove(); + if (visited.contains(nodeIdx)) { + throw new IllegalArgumentException("[tree] contains cycle at node " + nodeIdx); + } + visited.add(nodeIdx); + TreeNode.Builder treeNode = nodes.get(nodeIdx); + if (treeNode.getLeftChild() != null) { + toVisit.add(treeNode.getLeftChild()); + } + if (treeNode.getRightChild() != null) { + toVisit.add(treeNode.getRightChild()); + } + } + } + + void detectNullOrMissingNode(List nodes) { + if (nodes.isEmpty()) { + return; + } + if (nodes.get(0) == null) { + throw new IllegalArgumentException("[tree] must have non-null root node."); + } + List nullOrMissingNodes = new ArrayList<>(); + for (int i = 0; i < nodes.size(); i++) { + TreeNode.Builder currentNode = nodes.get(i); + if (currentNode == null) { + continue; + } + if (nodeNullOrMissing(currentNode.getLeftChild())) { + nullOrMissingNodes.add(currentNode.getLeftChild()); + } + if (nodeNullOrMissing(currentNode.getRightChild())) { + nullOrMissingNodes.add(currentNode.getRightChild()); + } + } + if (nullOrMissingNodes.isEmpty() == false) { + throw new IllegalArgumentException("[tree] contains null or missing nodes " + nullOrMissingNodes); + } + } + + private boolean nodeNullOrMissing(Integer nodeIdx) { + if (nodeIdx == null) { + return false; + } + return nodeIdx >= nodes.size() || nodes.get(nodeIdx) == null; + } + + /** + * Sets the node at {@code nodeIndex} to a leaf node. 
+ * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} + * @param value The prediction value + * @return this + */ + Tree.Builder addLeaf(int nodeIndex, double value) { + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value)); + return this; + } + + public Tree build() { + detectNullOrMissingNode(nodes); + detectCycle(nodes); + return new Tree(featureNames, + nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList())); + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java new file mode 100644 index 00000000000..f0dbb061750 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java @@ -0,0 +1,346 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.job.config.Operator; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class TreeNode implements ToXContentObject, Writeable { + + public static final String NAME = "tree_node"; + + public static final ParseField DECISION_TYPE = new ParseField("decision_type"); + public static final ParseField THRESHOLD = new ParseField("threshold"); + public static final ParseField LEFT_CHILD = new ParseField("left_child"); + public static final ParseField RIGHT_CHILD = new ParseField("right_child"); + public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); + public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); + public static final ParseField NODE_INDEX = new ParseField("node_index"); + public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); + public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); + + private static final ObjectParser LENIENT_PARSER = createParser(true); + private static final ObjectParser STRICT_PARSER = createParser(false); + + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( + NAME, + lenient, + TreeNode.Builder::new); + parser.declareDouble(TreeNode.Builder::setThreshold, THRESHOLD); + parser.declareField(TreeNode.Builder::setOperator, + p -> Operator.fromString(p.text()), + DECISION_TYPE, + ObjectParser.ValueType.STRING); + parser.declareInt(TreeNode.Builder::setLeftChild, LEFT_CHILD); + parser.declareInt(TreeNode.Builder::setRightChild, RIGHT_CHILD); + parser.declareBoolean(TreeNode.Builder::setDefaultLeft, 
DEFAULT_LEFT); + parser.declareInt(TreeNode.Builder::setSplitFeature, SPLIT_FEATURE); + parser.declareInt(TreeNode.Builder::setNodeIndex, NODE_INDEX); + parser.declareDouble(TreeNode.Builder::setSplitGain, SPLIT_GAIN); + parser.declareDouble(TreeNode.Builder::setLeafValue, LEAF_VALUE); + return parser; + } + + public static TreeNode.Builder fromXContent(XContentParser parser, boolean lenient) { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + private final Operator operator; + private final Double threshold; + private final Integer splitFeature; + private final int nodeIndex; + private final Double splitGain; + private final Double leafValue; + private final boolean defaultLeft; + private final int leftChild; + private final int rightChild; + + + TreeNode(Operator operator, + Double threshold, + Integer splitFeature, + Integer nodeIndex, + Double splitGain, + Double leafValue, + Boolean defaultLeft, + Integer leftChild, + Integer rightChild) { + this.operator = operator == null ? Operator.LTE : operator; + this.threshold = threshold; + this.splitFeature = splitFeature; + this.nodeIndex = ExceptionsHelper.requireNonNull(nodeIndex, NODE_INDEX.getPreferredName()); + this.splitGain = splitGain; + this.leafValue = leafValue; + this.defaultLeft = defaultLeft == null ? false : defaultLeft; + this.leftChild = leftChild == null ? -1 : leftChild; + this.rightChild = rightChild == null ? -1 : rightChild; + } + + public TreeNode(StreamInput in) throws IOException { + operator = Operator.readFromStream(in); + threshold = in.readOptionalDouble(); + splitFeature = in.readOptionalInt(); + splitGain = in.readOptionalDouble(); + nodeIndex = in.readInt(); + leafValue = in.readOptionalDouble(); + defaultLeft = in.readBoolean(); + leftChild = in.readInt(); + rightChild = in.readInt(); + } + + + public Operator getOperator() { + return operator; + } + + public Double getThreshold() { + return threshold; + } + + public Integer getSplitFeature() { + return splitFeature; + } + + public Integer getNodeIndex() { + return nodeIndex; + } + + public Double getSplitGain() { + return splitGain; + } + + public Double getLeafValue() { + return leafValue; + } + + public boolean isDefaultLeft() { + return defaultLeft; + } + + public int getLeftChild() { + return leftChild; + } + + public int getRightChild() { + return rightChild; + } + + public boolean isLeaf() { + return leftChild < 1; + } + + public int compare(List features) { + if (isLeaf()) { + throw new IllegalArgumentException("cannot call compare against a leaf node."); + } + Double feature = features.get(splitFeature); + if (isMissing(feature)) { + return defaultLeft ? leftChild : rightChild; + } + return operator.test(feature, threshold) ? 
leftChild : rightChild; + } + + private boolean isMissing(Double feature) { + return feature == null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + operator.writeTo(out); + out.writeOptionalDouble(threshold); + out.writeOptionalInt(splitFeature); + out.writeOptionalDouble(splitGain); + out.writeInt(nodeIndex); + out.writeOptionalDouble(leafValue); + out.writeBoolean(defaultLeft); + out.writeInt(leftChild); + out.writeInt(rightChild); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + addOptionalField(builder, DECISION_TYPE, operator); + addOptionalField(builder, THRESHOLD, threshold); + addOptionalField(builder, SPLIT_FEATURE, splitFeature); + addOptionalField(builder, SPLIT_GAIN, splitGain); + builder.field(NODE_INDEX.getPreferredName(), nodeIndex); + addOptionalField(builder, LEAF_VALUE, leafValue); + builder.field(DEFAULT_LEFT.getPreferredName(), defaultLeft); + if (leftChild >= 0) { + builder.field(LEFT_CHILD.getPreferredName(), leftChild); + } + if (rightChild >= 0) { + builder.field(RIGHT_CHILD.getPreferredName(), rightChild); + } + builder.endObject(); + return builder; + } + + private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { + if (value != null) { + builder.field(field.getPreferredName(), value); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TreeNode that = (TreeNode) o; + return Objects.equals(operator, that.operator) + && Objects.equals(threshold, that.threshold) + && Objects.equals(splitFeature, that.splitFeature) + && Objects.equals(nodeIndex, that.nodeIndex) + && Objects.equals(splitGain, that.splitGain) + && Objects.equals(leafValue, that.leafValue) + && Objects.equals(defaultLeft, that.defaultLeft) + && Objects.equals(leftChild, that.leftChild) + && Objects.equals(rightChild, that.rightChild); + } + + @Override + public int hashCode() { + return Objects.hash(operator, + threshold, + splitFeature, + splitGain, + nodeIndex, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public static Builder builder(int nodeIndex) { + return new Builder(nodeIndex); + } + + public static class Builder { + private Operator operator; + private Double threshold; + private Integer splitFeature; + private int nodeIndex; + private Double splitGain; + private Double leafValue; + private Boolean defaultLeft; + private Integer leftChild; + private Integer rightChild; + + public Builder(int nodeIndex) { + this.nodeIndex = nodeIndex; + } + + private Builder() { + } + + public Builder setOperator(Operator operator) { + this.operator = operator; + return this; + } + + public Builder setThreshold(Double threshold) { + this.threshold = threshold; + return this; + } + + public Builder setSplitFeature(Integer splitFeature) { + this.splitFeature = splitFeature; + return this; + } + + public Builder setNodeIndex(Integer nodeIndex) { + this.nodeIndex = nodeIndex; + return this; + } + + public Builder setSplitGain(Double splitGain) { + this.splitGain = splitGain; + return this; + } + + public Builder setLeafValue(Double leafValue) { + this.leafValue = leafValue; + return this; + } + + public Builder setDefaultLeft(Boolean defaultLeft) { + this.defaultLeft = defaultLeft; + return this; + } + + public Builder setLeftChild(Integer 
leftChild) { + this.leftChild = leftChild; + return this; + } + + Integer getLeftChild() { + return leftChild; + } + + public Builder setRightChild(Integer rightChild) { + this.rightChild = rightChild; + return this; + } + + Integer getRightChild() { + return rightChild; + } + + public void validate() { + if (nodeIndex < 0) { + throw new IllegalArgumentException("[node_index] must be a non-negative integer."); + } + if (leftChild == null) { // leaf validations + if (leafValue == null) { + throw new IllegalArgumentException("[leaf_value] is required for a leaf node."); + } + } else { + if (leftChild < 0) { + throw new IllegalArgumentException("[left_child] must be a non-negative integer."); + } + if (rightChild != null && rightChild < 0) { + throw new IllegalArgumentException("[right_child] must be a non-negative integer."); + } + if (threshold == null) { + throw new IllegalArgumentException("[threshold] must exist for non-leaf node."); + } + } + } + + public TreeNode build() { + validate(); + return new TreeNode(operator, + threshold, + splitFeature, + nodeIndex, + splitGain, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java index bd1740edf07..1e77c148324 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/NamedXContentObjectsTests.java @@ -14,6 +14,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.TreeTests; import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncodingTests; import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor; import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncodingTests; @@ -36,6 +40,7 @@ public class NamedXContentObjectsTests extends AbstractXContentTestCase STRICT_PARSER = createParser(false); static final ObjectParser LENIENT_PARSER = createParser(true); @@ -51,16 +56,30 @@ public class NamedXContentObjectsTests extends AbstractXContentTestCase noc.setUseExplicitPreprocessorOrder(true), PRE_PROCESSORS); + parser.declareNamedObjects(NamedObjectContainer::setTrainedModel, + (p, c, n) -> + lenient ? 
p.namedObject(LenientlyParsedTrainedModel.class, n, null) : + p.namedObject(StrictlyParsedTrainedModel.class, n, null), + TRAINED_MODEL); return parser; } private boolean useExplicitPreprocessorOrder = false; private List preProcessors; + private TrainedModel trainedModel; void setPreProcessors(List preProcessors) { this.preProcessors = preProcessors; } + void setTrainedModel(List trainedModel) { + this.trainedModel = trainedModel.get(0); + } + + void setModel(TrainedModel trainedModel) { + this.trainedModel = trainedModel; + } + void setUseExplicitPreprocessorOrder(boolean value) { this.useExplicitPreprocessorOrder = value; } @@ -73,6 +92,7 @@ public class NamedXContentObjectsTests extends AbstractXContentTestCase { + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + protected TreeNode doParseInstance(XContentParser parser) throws IOException { + return TreeNode.fromXContent(parser, lenient).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected TreeNode createTestInstance() { + Integer lft = randomBoolean() ? null : randomInt(100); + Integer rgt = randomBoolean() ? randomInt(100) : null; + Double threshold = lft != null || randomBoolean() ? randomDouble() : null; + Integer featureIndex = lft != null || randomBoolean() ? randomInt(100) : null; + return createRandom(randomInt(100), + lft, + rgt, + threshold, + featureIndex, + randomBoolean() ? null : randomFrom(Operator.values())).build(); + } + + public static TreeNode createRandomLeafNode(double internalValue) { + return TreeNode.builder(randomInt(100)) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeafValue(internalValue) + .build(); + } + + public static TreeNode.Builder createRandom(int nodeId, + Integer left, + Integer right, + Double threshold, + Integer featureIndex, + Operator operator) { + return TreeNode.builder(nodeId) + .setLeafValue(left == null ? randomDouble() : null) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeftChild(left) + .setRightChild(right) + .setThreshold(threshold) + .setOperator(operator) + .setSplitFeature(randomBoolean() ? null : randomInt()) + .setSplitGain(randomBoolean() ? 
null : randomDouble()) + .setSplitFeature(featureIndex); + } + + @Override + protected Writeable.Reader instanceReader() { + return TreeNode::new; + } + + public void testCompare() { + expectThrows(IllegalArgumentException.class, + () -> createRandomLeafNode(randomDouble()).compare(Collections.singletonList(randomDouble()))); + + List featureValues = Arrays.asList(0.1, null); + assertThat(createRandom(0, 2, 3, 0.0, 0, null).build().compare(featureValues), + equalTo(3)); + assertThat(createRandom(0, 2, 3, 0.0, 0, Operator.GT).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.2, 0, null).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.0, 1, null).setDefaultLeft(true).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.0, 1, null).setDefaultLeft(false).build().compare(featureValues), + equalTo(3)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java new file mode 100644 index 00000000000..2422c046abe --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + + +public class TreeTests extends AbstractSerializingTestCase { + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + protected Tree doParseInstance(XContentParser parser) throws IOException { + return lenient ? 
Tree.fromXContentLenient(parser) : Tree.fromXContentStrict(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.startsWith("feature_names"); + } + + + @Override + protected Tree createTestInstance() { + return createRandom(); + } + + public static Tree createRandom() { + return buildRandomTree(randomIntBetween(2, 15), 6); + } + + public static Tree buildRandomTree(int numFeatures, int depth) { + + Tree.Builder builder = Tree.builder(); + List featureNames = new ArrayList<>(numFeatures); + for(int i = 0; i < numFeatures; i++) { + featureNames.add(randomAlphaOfLength(10)); + } + builder.setFeatureNames(featureNames); + + TreeNode.Builder node = builder.addJunction(0, randomInt(numFeatures), true, randomDouble()); + List childNodes = Arrays.asList(node.getLeftChild(), node.getRightChild()); + + for (int i = 0; i < depth -1; i++) { + + List nextNodes = new ArrayList<>(); + for (int nodeId : childNodes) { + if (i == depth -2) { + builder.addLeaf(nodeId, randomDouble()); + } else { + TreeNode.Builder childNode = + builder.addJunction(nodeId, randomInt(numFeatures), true, randomDouble()); + nextNodes.add(childNode.getLeftChild()); + nextNodes.add(childNode.getRightChild()); + } + } + childNodes = nextNodes; + } + + return builder.build(); + } + + @Override + protected Writeable.Reader instanceReader() { + return Tree::new; + } + + public void testInfer() { + // Build a tree with 2 nodes and 3 leaves using 2 features + // The leaves have unique values 0.1, 0.2, 0.3 + Tree.Builder builder = Tree.builder(); + TreeNode.Builder rootNode = builder.addJunction(0, 0, true, 0.5); + builder.addLeaf(rootNode.getRightChild(), 0.3); + TreeNode.Builder leftChildNode = builder.addJunction(rootNode.getLeftChild(), 1, true, 0.8); + builder.addLeaf(leftChildNode.getLeftChild(), 0.1); + builder.addLeaf(leftChildNode.getRightChild(), 0.2); + + List featureNames = Arrays.asList("foo", "bar"); + Tree tree = builder.setFeatureNames(featureNames).build(); + + // This feature vector should hit the right child of the root node + List featureVector = Arrays.asList(0.6, 0.0); + Map featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.3, tree.infer(featureMap), 0.00001); + + // This should hit the left child of the left child of the root node + // i.e. it takes the path left, left + featureVector = Arrays.asList(0.3, 0.7); + featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.1, tree.infer(featureMap), 0.00001); + + // This should hit the right child of the left child of the root node + // i.e. 
it takes the path left, right + featureVector = Arrays.asList(0.3, 0.9); + featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.2, tree.infer(featureMap), 0.00001); + } + + public void testTreeWithNullRoot() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(Collections.singletonList(null)) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] must have non-null root node.")); + } + + public void testTreeWithInvalidNode() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble())) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains null or missing nodes [1]")); + } + + public void testTreeWithNullNode() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble()), + null) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains null or missing nodes [1]")); + } + + public void testTreeWithCycle() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble()), + TreeNode.builder(0) + .setLeftChild(0) + .setSplitFeature(1) + .setThreshold(randomDouble())) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains cycle at node 0")); + } + + private static Map zipObjMap(List keys, List values) { + return IntStream.range(0, keys.size()).boxed().collect(Collectors.toMap(keys::get, values::get)); + } +} From 45c77830181dacfe0e770238a4163dd19c39d1a7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 26 Sep 2019 07:40:54 +0100 Subject: [PATCH 72/94] Warn on slow metadata persistence (#47130) Today if metadata persistence is excessively slow on a master-ineligible node then the `ClusterApplierService` emits a warning indicating that the `GatewayMetaState` applier was slow, but gives no further details. If it is excessively slow on a master-eligible node then we do not see any warning at all, although we might see other consequences such as a lagging node or a master failure. With this commit we emit a warning if metadata persistence takes longer than a configurable threshold, which defaults to `10s`. We also emit statistics that record how much index metadata was persisted and how much was skipped since this can help distinguish cases where IO was slow from cases where there are simply too many indices involved. Backport of #47005. 
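
For reference, the threshold is registered as the dynamic setting
`gateway.slow_write_logging_threshold`, so it can be raised or lowered on a
running cluster without a restart. As an illustrative sketch (the `30s` value
is an example only), using the cluster settings update API:

    PUT _cluster/settings
    {
      "persistent": {
        "gateway.slow_write_logging_threshold": "30s"
      }
    }
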
--- .../common/settings/ClusterSettings.java | 2 + .../gateway/GatewayMetaState.java | 7 +- .../IncrementalClusterStateWriter.java | 63 +++++++++- .../IncrementalClusterStateWriterTests.java | 110 +++++++++++++++++- .../gateway/MockGatewayMetaState.java | 13 ++- 5 files changed, 187 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 69f4f19dd51..d2a338a5405 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -78,6 +78,7 @@ import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.gateway.IncrementalClusterStateWriter; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -245,6 +246,7 @@ public final class ClusterSettings extends AbstractScopedSettings { GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING, GatewayService.RECOVER_AFTER_NODES_SETTING, GatewayService.RECOVER_AFTER_TIME_SETTING, + IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD, NetworkModule.HTTP_DEFAULT_TYPE_SETTING, NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING, NetworkModule.HTTP_TYPE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index f9433ee6059..35b22968459 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -92,8 +92,11 @@ public class GatewayMetaState { throw new ElasticsearchException("failed to load metadata", e); } final IncrementalClusterStateWriter incrementalClusterStateWriter - = new IncrementalClusterStateWriter(metaStateService, manifestClusterStateTuple.v1(), - prepareInitialClusterState(transportService, clusterService, manifestClusterStateTuple.v2())); + = new IncrementalClusterStateWriter(settings, clusterService.getClusterSettings(), metaStateService, + manifestClusterStateTuple.v1(), + prepareInitialClusterState(transportService, clusterService, manifestClusterStateTuple.v2()), + transportService.getThreadPool()::relativeTimeInMillis); + if (DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings).equals(DiscoveryModule.ZEN_DISCOVERY_TYPE)) { // only for tests that simulate mixed Zen1/Zen2 clusters, see Zen1IT if (isMasterOrDataNode(settings)) { diff --git a/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java index 5facb826a24..d015bcc5b6c 100644 --- a/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java +++ b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java @@ -18,12 +18,18 @@ */ package org.elasticsearch.gateway; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RoutingNode; import 
org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import java.util.ArrayList; @@ -33,11 +39,17 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.LongSupplier; /** * Tracks the metadata written to disk, allowing updated metadata to be written incrementally (i.e. only writing out the changed metadata). */ -class IncrementalClusterStateWriter { +public class IncrementalClusterStateWriter { + + private static final Logger logger = LogManager.getLogger(IncrementalClusterStateWriter.class); + + public static final Setting SLOW_WRITE_LOGGING_THRESHOLD = Setting.timeSetting("gateway.slow_write_logging_threshold", + TimeValue.timeValueSeconds(10), TimeValue.ZERO, Setting.Property.NodeScope, Setting.Property.Dynamic); private final MetaStateService metaStateService; @@ -46,13 +58,24 @@ class IncrementalClusterStateWriter { // no need to synchronize access to these fields. private Manifest previousManifest; private ClusterState previousClusterState; + private final LongSupplier relativeTimeMillisSupplier; private boolean incrementalWrite; - IncrementalClusterStateWriter(MetaStateService metaStateService, Manifest manifest, ClusterState clusterState) { + private volatile TimeValue slowWriteLoggingThreshold; + + IncrementalClusterStateWriter(Settings settings, ClusterSettings clusterSettings, MetaStateService metaStateService, Manifest manifest, + ClusterState clusterState, LongSupplier relativeTimeMillisSupplier) { this.metaStateService = metaStateService; this.previousManifest = manifest; this.previousClusterState = clusterState; + this.relativeTimeMillisSupplier = relativeTimeMillisSupplier; this.incrementalWrite = false; + this.slowWriteLoggingThreshold = SLOW_WRITE_LOGGING_THRESHOLD.get(settings); + clusterSettings.addSettingsUpdateConsumer(SLOW_WRITE_LOGGING_THRESHOLD, this::setSlowWriteLoggingThreshold); + } + + private void setSlowWriteLoggingThreshold(TimeValue slowWriteLoggingThreshold) { + this.slowWriteLoggingThreshold = slowWriteLoggingThreshold; } void setCurrentTerm(long currentTerm) throws WriteStateException { @@ -85,14 +108,26 @@ class IncrementalClusterStateWriter { void updateClusterState(ClusterState newState, ClusterState previousState) throws WriteStateException { MetaData newMetaData = newState.metaData(); + final long startTimeMillis = relativeTimeMillisSupplier.getAsLong(); + final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, previousManifest); long globalStateGeneration = writeGlobalState(writer, newMetaData); Map indexGenerations = writeIndicesMetadata(writer, newState, previousState); Manifest manifest = new Manifest(previousManifest.getCurrentTerm(), newState.version(), globalStateGeneration, indexGenerations); writeManifest(writer, manifest); - previousManifest = manifest; previousClusterState = newState; + + final long durationMillis = relativeTimeMillisSupplier.getAsLong() - startTimeMillis; + final TimeValue finalSlowWriteLoggingThreshold = this.slowWriteLoggingThreshold; + if (durationMillis >= finalSlowWriteLoggingThreshold.getMillis()) { + logger.warn("writing cluster state took [{}ms] which is above the warn threshold of [{}]; " + + "wrote metadata for [{}] indices and skipped [{}] unchanged indices", + 
durationMillis, finalSlowWriteLoggingThreshold, writer.getIndicesWritten(), writer.getIndicesSkipped()); + } else { + logger.debug("writing cluster state took [{}ms]; wrote metadata for [{}] indices and skipped [{}] unchanged indices", + durationMillis, writer.getIndicesWritten(), writer.getIndicesSkipped()); + } } private void writeManifest(AtomicClusterStateWriter writer, Manifest manifest) throws WriteStateException { @@ -256,6 +291,9 @@ class IncrementalClusterStateWriter { private final MetaStateService metaStateService; private boolean finished; + private int indicesWritten; + private int indicesSkipped; + AtomicClusterStateWriter(MetaStateService metaStateService, Manifest previousManifest) { this.metaStateService = metaStateService; assert previousManifest != null; @@ -320,6 +358,22 @@ class IncrementalClusterStateWriter { rollbackCleanupActions.forEach(Runnable::run); finished = true; } + + void incrementIndicesWritten() { + indicesWritten++; + } + + void incrementIndicesSkipped() { + indicesSkipped++; + } + + int getIndicesWritten() { + return indicesWritten; + } + + int getIndicesSkipped() { + return indicesSkipped; + } } static class KeepPreviousGeneration implements IndexMetaDataAction { @@ -338,6 +392,7 @@ class IncrementalClusterStateWriter { @Override public long execute(AtomicClusterStateWriter writer) { + writer.incrementIndicesSkipped(); return generation; } } @@ -356,6 +411,7 @@ class IncrementalClusterStateWriter { @Override public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + writer.incrementIndicesWritten(); return writer.writeIndex("freshly created", indexMetaData); } } @@ -376,6 +432,7 @@ class IncrementalClusterStateWriter { @Override public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + writer.incrementIndicesWritten(); return writer.writeIndex( "version changed from [" + oldIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]", newIndexMetaData); diff --git a/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java index b41a24bb820..d5a03dee70e 100644 --- a/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java @@ -18,26 +18,35 @@ */ package org.elasticsearch.gateway; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.store.Directory; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import 
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.mockito.ArgumentCaptor; import java.io.IOException; @@ -48,15 +57,18 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { @@ -250,13 +262,19 @@ public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { assertThat(actions, hasSize(3)); + boolean keptPreviousGeneration = false; + boolean wroteNewIndex = false; + boolean wroteChangedIndex = false; + for (IncrementalClusterStateWriter.IndexMetaDataAction action : actions) { if (action instanceof IncrementalClusterStateWriter.KeepPreviousGeneration) { assertThat(action.getIndex(), equalTo(notChangedIndex.getIndex())); IncrementalClusterStateWriter.AtomicClusterStateWriter writer = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class); assertThat(action.execute(writer), equalTo(3L)); - verifyZeroInteractions(writer); + verify(writer, times(1)).incrementIndicesSkipped(); + verifyNoMoreInteractions(writer); + keptPreviousGeneration = true; } if (action instanceof IncrementalClusterStateWriter.WriteNewIndexMetaData) { assertThat(action.getIndex(), equalTo(newIndex.getIndex())); @@ -264,6 +282,8 @@ public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class); when(writer.writeIndex("freshly created", newIndex)).thenReturn(0L); assertThat(action.execute(writer), equalTo(0L)); + verify(writer, times(1)).incrementIndicesWritten(); + wroteNewIndex = true; } if (action instanceof IncrementalClusterStateWriter.WriteChangedIndexMetaData) { assertThat(action.getIndex(), equalTo(newVersionChangedIndex.getIndex())); @@ -273,10 +293,16 @@ public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { assertThat(action.execute(writer), equalTo(3L)); ArgumentCaptor reason = ArgumentCaptor.forClass(String.class); verify(writer).writeIndex(reason.capture(), eq(newVersionChangedIndex)); + verify(writer, times(1)).incrementIndicesWritten(); assertThat(reason.getValue(), containsString(Long.toString(versionChangedIndex.getVersion()))); assertThat(reason.getValue(), containsString(Long.toString(newVersionChangedIndex.getVersion()))); + wroteChangedIndex = true; } } + + assertTrue(keptPreviousGeneration); + assertTrue(wroteNewIndex); + assertTrue(wroteChangedIndex); } private static class MetaStateServiceWithFailures extends MetaStateService { 
@@ -426,4 +452,84 @@ public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { assertTrue(possibleMetaData.stream().anyMatch(md -> metaDataEquals(md, loadedMetaData))); } } + + @TestLogging(value = "org.elasticsearch.gateway:WARN", reason = "to ensure that we log gateway events on WARN level") + public void testSlowLogging() throws WriteStateException, IllegalAccessException { + final long slowWriteLoggingThresholdMillis; + final Settings settings; + if (randomBoolean()) { + slowWriteLoggingThresholdMillis = IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.get(Settings.EMPTY).millis(); + settings = Settings.EMPTY; + } else { + slowWriteLoggingThresholdMillis = randomLongBetween(2, 100000); + settings = Settings.builder() + .put(IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), slowWriteLoggingThresholdMillis + "ms") + .build(); + } + + final DiscoveryNode localNode = newNode("node"); + final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build(); + + final long startTimeMillis = randomLongBetween(0L, Long.MAX_VALUE - slowWriteLoggingThresholdMillis * 10); + final AtomicLong currentTime = new AtomicLong(startTimeMillis); + final AtomicLong writeDurationMillis = new AtomicLong(slowWriteLoggingThresholdMillis); + + final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + final IncrementalClusterStateWriter incrementalClusterStateWriter + = new IncrementalClusterStateWriter(settings, clusterSettings, mock(MetaStateService.class), + new Manifest(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), Collections.emptyMap()), + clusterState, () -> currentTime.getAndAdd(writeDurationMillis.get())); + + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation( + "should see warning at threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "writing cluster state took [*] which is above the warn threshold of [*]; " + + "wrote metadata for [0] indices and skipped [0] unchanged indices")); + + writeDurationMillis.set(randomLongBetween(slowWriteLoggingThresholdMillis, slowWriteLoggingThresholdMillis * 2)); + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation( + "should see warning above threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "writing cluster state took [*] which is above the warn threshold of [*]; " + + "wrote metadata for [0] indices and skipped [0] unchanged indices")); + + writeDurationMillis.set(randomLongBetween(1, slowWriteLoggingThresholdMillis - 1)); + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.UnseenEventExpectation( + "should not see warning below threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "*")); + + clusterSettings.applySettings(Settings.builder() + .put(IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), writeDurationMillis.get() + "ms") + .build()); + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation( + "should see warning at reduced threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "writing cluster state took [*] which is above the warn threshold of [*]; " + + "wrote metadata for [0] indices and 
skipped [0] unchanged indices")); + + assertThat(currentTime.get(), lessThan(startTimeMillis + 10 * slowWriteLoggingThresholdMillis)); // ensure no overflow + } + + private void assertExpectedLogs(ClusterState clusterState, IncrementalClusterStateWriter incrementalClusterStateWriter, + MockLogAppender.LoggingExpectation expectation) throws IllegalAccessException, WriteStateException { + MockLogAppender mockAppender = new MockLogAppender(); + mockAppender.start(); + mockAppender.addExpectation(expectation); + Logger classLogger = LogManager.getLogger(IncrementalClusterStateWriter.class); + Loggers.addAppender(classLogger, mockAppender); + + try { + incrementalClusterStateWriter.updateClusterState(clusterState, clusterState); + } finally { + Loggers.removeAppender(classLogger, mockAppender); + mockAppender.stop(); + } + mockAppender.assertAllExpectationsMatched(); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java index b66b5ea3ee2..b73a90b4284 100644 --- a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java +++ b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java @@ -23,12 +23,17 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.MetaDataUpgrader; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + /** * {@link GatewayMetaState} constructor accepts a lot of arguments. * It's not always easy / convenient to construct these dependencies. @@ -55,6 +60,12 @@ public class MockGatewayMetaState extends GatewayMetaState { } public void start(Settings settings, NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry) { - start(settings, null, null, new MetaStateService(nodeEnvironment, xContentRegistry), null, null); + final TransportService transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(mock(ThreadPool.class)); + final ClusterService clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()) + .thenReturn(new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + start(settings, transportService, clusterService, new MetaStateService(nodeEnvironment, xContentRegistry), + null, null); } } From 429f23ea2f25f05a5c1a4ab54f0a4c45a7d723ff Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 26 Sep 2019 08:55:28 +0200 Subject: [PATCH 73/94] Allow ingest processors to execute in a non blocking manner. (#47122) Backport of #46241 This PR makes ingest execution non-blocking by adding an additional method to the Processor interface that accepts a BiConsumer as a handler, and by changing IngestService#executeBulkRequest(...) to ingest documents in a non-blocking fashion iff a processor executes in a non-blocking fashion.
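For example, a processor that calls out to another service can now complete a document without blocking a write thread. The following is a minimal sketch of such a processor; the AsyncLookupService interface, the field names, and the "async_lookup" type are made up for illustration and are not part of this change:

import java.util.function.BiConsumer;

import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.IngestDocument;

public final class AsyncLookupProcessor extends AbstractProcessor {

    // hypothetical callback-based client, standing in for any non-blocking call
    interface AsyncLookupService {
        void lookup(String key, BiConsumer<String, Exception> callback);
    }

    private final AsyncLookupService lookupService;

    AsyncLookupProcessor(String tag, AsyncLookupService lookupService) {
        super(tag);
        this.lookupService = lookupService;
    }

    @Override
    public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
        String key = ingestDocument.getFieldValue("lookup_key", String.class);
        // no thread is blocked here; the handler may run on the client's response thread
        lookupService.lookup(key, (value, e) -> {
            if (e != null) {
                handler.accept(null, e);
            } else {
                ingestDocument.setFieldValue("lookup_value", value);
                handler.accept(ingestDocument, null);
            }
        });
    }

    @Override
    public IngestDocument execute(IngestDocument ingestDocument) {
        // same convention as ForEachProcessor and ConditionalProcessor in this change
        throw new UnsupportedOperationException("this method should not get executed");
    }

    @Override
    public String getType() {
        return "async_lookup";
    }
}

Because the new Processor#execute(IngestDocument, BiConsumer) method has a default implementation that delegates to the existing synchronous execute(IngestDocument), processors that do not need asynchronous execution keep working unchanged.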
This is the second PR that merges changes made to the server module from the enrich branch (see #32789) into the master branch. The plan is to merge changes made to the server module separately from the PR that will merge enrich into master, so that these changes can be reviewed in isolation. This change originates from the enrich branch and was introduced there in #43361. --- .../ingest/common/ForEachProcessor.java | 53 +++-- .../ingest/common/ForEachProcessorTests.java | 27 +-- .../action/bulk/TransportBulkAction.java | 81 +++++-- .../ingest/SimulateExecutionService.java | 50 ++-- .../ingest/CompoundProcessor.java | 113 +++++---- .../ingest/ConditionalProcessor.java | 30 ++- .../elasticsearch/ingest/IngestDocument.java | 22 +- .../elasticsearch/ingest/IngestService.java | 111 ++++++--- .../org/elasticsearch/ingest/Pipeline.java | 20 +- .../ingest/PipelineProcessor.java | 16 +- .../org/elasticsearch/ingest/Processor.java | 16 ++ .../ingest/TrackingResultProcessor.java | 103 +++++---- .../action/bulk/BulkRequestModifierTests.java | 4 +- .../bulk/TransportBulkActionIngestTests.java | 52 +++-- .../action/ingest/AsyncIngestProcessorIT.java | 160 +++++++++++++ .../ingest/SimulateExecutionServiceTests.java | 99 ++++++-- .../ingest/CompoundProcessorTests.java | 32 +-- .../ingest/ConditionalProcessorTests.java | 14 +- .../ingest/IngestServiceTests.java | 218 +++++++++++------- .../ingest/PipelineProcessorTests.java | 26 +-- .../ingest/TrackingResultProcessorTests.java | 31 ++- 21 files changed, 869 insertions(+), 409 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index d56a2731d35..681b167c828 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -28,6 +28,8 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.BiConsumer; import org.elasticsearch.ingest.WrappingProcessor; import org.elasticsearch.script.ScriptService; @@ -65,29 +67,46 @@ public final class ForEachProcessor extends AbstractProcessor implements Wrappin } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + public void execute(IngestDocument ingestDocument, BiConsumer handler) { List values = ingestDocument.getFieldValue(field, List.class, ignoreMissing); if (values == null) { if (ignoreMissing) { - return ingestDocument; + handler.accept(ingestDocument, null); + } else { + handler.accept(null, new IllegalArgumentException("field [" + field + "] is null, cannot loop over its elements.")); } - throw new IllegalArgumentException("field [" + field + "] is null, cannot loop over its elements."); + } else { + List newValues = new CopyOnWriteArrayList<>(); + innerExecute(0, values, newValues, ingestDocument, handler); } - List newValues = new ArrayList<>(values.size()); - IngestDocument document = ingestDocument; - for (Object value : values) { - Object previousValue = ingestDocument.getIngestMetadata().put("_value", value); - try { - document = processor.execute(document); - if (document == null) { - return null; - } - } finally { - 
newValues.add(ingestDocument.getIngestMetadata().put("_value", previousValue)); + } + + void innerExecute(int index, List values, List newValues, IngestDocument document, + BiConsumer handler) { + if (index == values.size()) { + document.setFieldValue(field, new ArrayList<>(newValues)); + handler.accept(document, null); + return; + } + + Object value = values.get(index); + Object previousValue = document.getIngestMetadata().put("_value", value); + processor.execute(document, (result, e) -> { + if (e != null) { + newValues.add(document.getIngestMetadata().put("_value", previousValue)); + handler.accept(null, e); + } else if (result == null) { + handler.accept(null, null); + } else { + newValues.add(document.getIngestMetadata().put("_value", previousValue)); + innerExecute(index + 1, values, newValues, document, handler); } - } - document.setFieldValue(field, newValues); - return document; + }); + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException("this method should not get executed"); } @Override diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 282994d8eb3..a4ee786315c 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -53,7 +53,7 @@ public class ForEachProcessorTests extends ESTestCase { "_tag", "values", new UppercaseProcessor("_tag", "_ingest._value", false, "_ingest._value"), false ); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); @SuppressWarnings("unchecked") List result = ingestDocument.getFieldValue("values", List.class); @@ -73,12 +73,9 @@ public class ForEachProcessorTests extends ESTestCase { } }); ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor, false); - try { - processor.execute(ingestDocument); - fail("exception expected"); - } catch (RuntimeException e) { - assertThat(e.getMessage(), equalTo("failure")); - } + Exception[] exceptions = new Exception[1]; + processor.execute(ingestDocument, (result, e) -> {exceptions[0] = e;}); + assertThat(exceptions[0].getMessage(), equalTo("failure")); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("a", "b", "c"))); @@ -95,7 +92,7 @@ public class ForEachProcessorTests extends ESTestCase { "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)), false ); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("A", "B", "c"))); } @@ -114,7 +111,7 @@ public class ForEachProcessorTests extends ESTestCase { id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id")); }); ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertThat(innerProcessor.getInvokedCounter(), equalTo(2)); assertThat(ingestDocument.getFieldValue("values.0.index", String.class), equalTo("_index")); @@ -142,7 +139,7 
@@ public class ForEachProcessorTests extends ESTestCase { "_tag", "values", new SetProcessor("_tag", new TestTemplateService.MockTemplateScript.Factory("_ingest._value.new_field"), (model) -> model.get("other")), false); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getFieldValue("values.0.new_field", String.class), equalTo("value")); assertThat(ingestDocument.getFieldValue("values.1.new_field", String.class), equalTo("value")); @@ -180,7 +177,7 @@ public class ForEachProcessorTests extends ESTestCase { ); ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); @SuppressWarnings("unchecked") List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.size(), equalTo(numValues)); @@ -205,7 +202,7 @@ public class ForEachProcessorTests extends ESTestCase { Collections.singletonList(new UppercaseProcessor("_tag_upper", "_ingest._value", false, "_ingest._value")), Collections.singletonList(new AppendProcessor("_tag", template, (model) -> (Collections.singletonList("added")))) ), false); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("STRING")); @@ -231,7 +228,7 @@ public class ForEachProcessorTests extends ESTestCase { TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class))); ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor, false); - forEachProcessor.execute(ingestDocument); + forEachProcessor.execute(ingestDocument, (result, e) -> {}); List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("new_value")); @@ -264,7 +261,7 @@ public class ForEachProcessorTests extends ESTestCase { ); ForEachProcessor processor = new ForEachProcessor( "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor, false), false); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); List result = ingestDocument.getFieldValue("values1.0.values2", List.class); assertThat(result.get(0), equalTo("ABC")); @@ -282,7 +279,7 @@ public class ForEachProcessorTests extends ESTestCase { IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); TestProcessor testProcessor = new TestProcessor(doc -> {}); ForEachProcessor processor = new ForEachProcessor("_tag", "_ingest._value", testProcessor, true); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertIngestDocument(originalIngestDocument, ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(0)); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 0745e3b8d4d..6917b355220 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -19,6 +19,8 @@ package org.elasticsearch.action.bulk; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import 
org.apache.lucene.util.SparseFixedBitSet; import org.elasticsearch.Assertions; @@ -57,6 +59,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -82,6 +85,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicIntegerArray; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -648,14 +652,13 @@ public class TransportBulkAction extends HandledTransportAction listener) { - long ingestStartTimeInNanos = System.nanoTime(); - BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); - ingestService.executeBulkRequest(() -> bulkRequestModifier, - (indexRequest, exception) -> { - logger.debug(() -> new ParameterizedMessage("failed to execute pipeline [{}] for document [{}/{}/{}]", - indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id()), exception); - bulkRequestModifier.markCurrentItemAsFailed(exception); - }, (exception) -> { + final long ingestStartTimeInNanos = System.nanoTime(); + final BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); + ingestService.executeBulkRequest( + original.numberOfActions(), + () -> bulkRequestModifier, + bulkRequestModifier::markItemAsFailed, + (originalThread, exception) -> { if (exception != null) { logger.error("failed to execute pipeline for a bulk request", exception); listener.onFailure(exception); @@ -670,26 +673,56 @@ public class TransportBulkAction extends HandledTransportAction bulkRequestModifier.markCurrentItemAsDropped()); + bulkRequestModifier::markItemAsDropped + ); } static final class BulkRequestModifier implements Iterator> { + private static final Logger LOGGER = LogManager.getLogger(BulkRequestModifier.class); + final BulkRequest bulkRequest; final SparseFixedBitSet failedSlots; final List itemResponses; + final AtomicIntegerArray originalSlots; - int currentSlot = -1; - int[] originalSlots; + volatile int currentSlot = -1; BulkRequestModifier(BulkRequest bulkRequest) { this.bulkRequest = bulkRequest; this.failedSlots = new SparseFixedBitSet(bulkRequest.requests().size()); this.itemResponses = new ArrayList<>(bulkRequest.requests().size()); + this.originalSlots = new AtomicIntegerArray(bulkRequest.requests().size()); // oversize, but that's ok } @Override @@ -713,12 +746,11 @@ public class TransportBulkAction extends HandledTransportAction> requests = bulkRequest.requests(); - originalSlots = new int[requests.size()]; // oversize, but that's ok for (int i = 0; i < requests.size(); i++) { DocWriteRequest request = requests.get(i); if (failedSlots.get(i) == false) { modifiedBulkRequest.add(request); - originalSlots[slot++] = i; + originalSlots.set(slot++, i); } } return modifiedBulkRequest; @@ -733,7 +765,7 @@ public class TransportBulkAction extends HandledTransportAction { BulkItemResponse[] items = response.getItems(); for (int i = 0; i < items.length; i++) { - itemResponses.add(originalSlots[i], response.getItems()[i]); + itemResponses.add(originalSlots.get(i), response.getItems()[i]); } delegatedListener.onResponse( new BulkResponse( @@ 
-742,12 +774,12 @@ public class TransportBulkAction extends HandledTransportAction new ParameterizedMessage("failed to execute pipeline [{}] for document [{}/{}/{}]", + indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id()), e); + // We hit a error during preprocessing a request, so we: // 1) Remember the request item slot from the bulk, so that we're done processing all requests we know what failed // 2) Add a bulk item failure for this request // 3) Continue with the next request in the bulk. - failedSlots.set(currentSlot); + failedSlots.set(slot); BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), e); - itemResponses.add(new BulkItemResponse(currentSlot, indexRequest.opType(), failure)); + itemResponses.add(new BulkItemResponse(slot, indexRequest.opType(), failure)); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 8ed2ffa5ba0..070e99cc5c7 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -26,8 +26,10 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Pipeline; import org.elasticsearch.threadpool.ThreadPool; -import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiConsumer; import static org.elasticsearch.ingest.TrackingResultProcessor.decorate; @@ -41,38 +43,42 @@ class SimulateExecutionService { this.threadPool = threadPool; } - SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) { + void executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose, + BiConsumer handler) { if (verbose) { - List processorResultList = new ArrayList<>(); + List processorResultList = new CopyOnWriteArrayList<>(); CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); - try { - Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), - verbosePipelineProcessor); - ingestDocument.executePipeline(verbosePipeline); - return new SimulateDocumentVerboseResult(processorResultList); - } catch (Exception e) { - return new SimulateDocumentVerboseResult(processorResultList); - } + Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), + verbosePipelineProcessor); + ingestDocument.executePipeline(verbosePipeline, (result, e) -> { + handler.accept(new SimulateDocumentVerboseResult(processorResultList), e); + }); } else { - try { - IngestDocument result = pipeline.execute(ingestDocument); - return new SimulateDocumentBaseResult(result); - } catch (Exception e) { - return new SimulateDocumentBaseResult(e); - } + pipeline.execute(ingestDocument, (result, e) -> { + if (e == null) { + handler.accept(new SimulateDocumentBaseResult(result), null); + } else { + handler.accept(new SimulateDocumentBaseResult(e), null); + } + }); } } public void execute(SimulatePipelineRequest.Parsed request, ActionListener listener) { threadPool.executor(THREAD_POOL_NAME).execute(ActionRunnable.wrap(listener, l -> { - List responses = new ArrayList<>(); - for (IngestDocument 
ingestDocument : request.getDocuments()) { - SimulateDocumentResult response = executeDocument(request.getPipeline(), ingestDocument, request.isVerbose()); + final AtomicInteger counter = new AtomicInteger(); + final List responses = new CopyOnWriteArrayList<>(); + for (IngestDocument ingestDocument : request.getDocuments()) { + executeDocument(request.getPipeline(), ingestDocument, request.isVerbose(), (response, e) -> { if (response != null) { responses.add(response); } - } - l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); + if (counter.incrementAndGet() == request.getDocuments().size()) { + l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), + request.isVerbose(), responses)); + } + }); + } })); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index a095d7647d9..cf75ead3735 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -28,6 +28,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -114,58 +115,78 @@ public class CompoundProcessor implements Processor { @Override public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - for (Tuple processorWithMetric : processorsWithMetrics) { - Processor processor = processorWithMetric.v1(); - IngestMetric metric = processorWithMetric.v2(); - long startTimeInNanos = relativeTimeProvider.getAsLong(); - try { - metric.preIngest(); - if (processor.execute(ingestDocument) == null) { - return null; - } - } catch (Exception e) { - metric.ingestFailed(); - if (ignoreFailure) { - continue; - } - - ElasticsearchException compoundProcessorException = - newCompoundProcessorException(e, processor.getType(), processor.getTag()); - if (onFailureProcessors.isEmpty()) { - throw compoundProcessorException; - } else { - if (executeOnFailure(ingestDocument, compoundProcessorException) == false) { - return null; - } - break; - } - } finally { - long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); - metric.postIngest(ingestTimeInMillis); - } - } - return ingestDocument; + throw new UnsupportedOperationException("this method should not get executed"); } - /** - * @return true if execution should continue, false if document is dropped. 
- */ - boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { - try { - putFailureMetadata(ingestDocument, exception); - for (Processor processor : onFailureProcessors) { - try { - if (processor.execute(ingestDocument) == null) { - return false; + @Override + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + innerExecute(0, ingestDocument, handler); + } + + void innerExecute(int currentProcessor, IngestDocument ingestDocument, BiConsumer handler) { + if (currentProcessor == processorsWithMetrics.size()) { + handler.accept(ingestDocument, null); + return; + } + + Tuple processorWithMetric = processorsWithMetrics.get(currentProcessor); + final Processor processor = processorWithMetric.v1(); + final IngestMetric metric = processorWithMetric.v2(); + final long startTimeInNanos = relativeTimeProvider.getAsLong(); + metric.preIngest(); + processor.execute(ingestDocument, (result, e) -> { + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); + metric.postIngest(ingestTimeInMillis); + + if (e != null) { + metric.ingestFailed(); + if (ignoreFailure) { + innerExecute(currentProcessor + 1, ingestDocument, handler); + } else { + ElasticsearchException compoundProcessorException = + newCompoundProcessorException(e, processor.getType(), processor.getTag()); + if (onFailureProcessors.isEmpty()) { + handler.accept(null, compoundProcessorException); + } else { + executeOnFailureAsync(0, ingestDocument, compoundProcessorException, handler); } - } catch (Exception e) { - throw newCompoundProcessorException(e, processor.getType(), processor.getTag()); + } + } else { + if (result != null) { + innerExecute(currentProcessor + 1, result, handler); + } else { + handler.accept(null, null); } } - } finally { - removeFailureMetadata(ingestDocument); + }); + } + + void executeOnFailureAsync(int currentOnFailureProcessor, IngestDocument ingestDocument, ElasticsearchException exception, + BiConsumer handler) { + if (currentOnFailureProcessor == 0) { + putFailureMetadata(ingestDocument, exception); } - return true; + + if (currentOnFailureProcessor == onFailureProcessors.size()) { + removeFailureMetadata(ingestDocument); + handler.accept(ingestDocument, null); + return; + } + + final Processor onFailureProcessor = onFailureProcessors.get(currentOnFailureProcessor); + onFailureProcessor.execute(ingestDocument, (result, e) -> { + if (e != null) { + removeFailureMetadata(ingestDocument); + handler.accept(null, newCompoundProcessorException(e, onFailureProcessor.getType(), onFailureProcessor.getTag())); + return; + } + if (result == null) { + removeFailureMetadata(ingestDocument); + handler.accept(null, null); + return; + } + executeOnFailureAsync(currentOnFailureProcessor + 1, ingestDocument, exception, handler); + }); } private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) { diff --git a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java index 05ba14468bb..e6f92a87178 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConditionalProcessor.java @@ -30,6 +30,7 @@ import java.util.ListIterator; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; import java.util.function.LongSupplier; import 
java.util.stream.Collectors; @@ -74,21 +75,28 @@ public class ConditionalProcessor extends AbstractProcessor implements WrappingP } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + public void execute(IngestDocument ingestDocument, BiConsumer handler) { if (evaluate(ingestDocument)) { - long startTimeInNanos = relativeTimeProvider.getAsLong(); - try { - metric.preIngest(); - return processor.execute(ingestDocument); - } catch (Exception e) { - metric.ingestFailed(); - throw e; - } finally { + final long startTimeInNanos = relativeTimeProvider.getAsLong(); + metric.preIngest(); + processor.execute(ingestDocument, (result, e) -> { long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); metric.postIngest(ingestTimeInMillis); - } + if (e != null) { + metric.ingestFailed(); + handler.accept(null, e); + } else { + handler.accept(result, null); + } + }); + } else { + handler.accept(ingestDocument, null); } - return ingestDocument; + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException("this method should not get executed"); } boolean evaluate(IngestDocument ingestDocument) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 90ebc8e0741..34299cb475a 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -43,6 +43,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.BiConsumer; /** * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). @@ -641,17 +642,18 @@ public final class IngestDocument { /** * Executes the given pipeline with for this document unless the pipeline has already been executed * for this document. 
- * @param pipeline Pipeline to execute - * @throws Exception On exception in pipeline execution + * + * @param pipeline the pipeline to execute + * @param handler handles the result or failure */ - public IngestDocument executePipeline(Pipeline pipeline) throws Exception { - try { - if (this.executedPipelines.add(pipeline) == false) { - throw new IllegalStateException("Cycle detected for pipeline: " + pipeline.getId()); - } - return pipeline.execute(this); - } finally { - executedPipelines.remove(pipeline); + public void executePipeline(Pipeline pipeline, BiConsumer handler) { + if (executedPipelines.add(pipeline)) { + pipeline.execute(this, (result, e) -> { + executedPipelines.remove(pipeline); + handler.accept(result, e); + }); + } else { + handler.accept(null, new IllegalStateException("Cycle detected for pipeline: " + pipeline.getId())); } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 707760432af..2ba5214f80f 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -63,8 +63,10 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.IntConsumer; /** * Holder class for several ingest related services. @@ -329,42 +331,72 @@ public class IngestService implements ClusterStateApplier { ExceptionsHelper.rethrowAndSuppress(exceptions); } - public void executeBulkRequest(Iterable> actionRequests, - BiConsumer itemFailureHandler, Consumer completionHandler, - Consumer itemDroppedHandler) { + public void executeBulkRequest(int numberOfActionRequests, + Iterable> actionRequests, + BiConsumer itemFailureHandler, + BiConsumer completionHandler, + IntConsumer itemDroppedHandler) { threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { - completionHandler.accept(e); + completionHandler.accept(null, e); } @Override protected void doRun() { + final Thread originalThread = Thread.currentThread(); + final AtomicInteger counter = new AtomicInteger(numberOfActionRequests); + int i = 0; for (DocWriteRequest actionRequest : actionRequests) { IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest); if (indexRequest == null) { + if (counter.decrementAndGet() == 0){ + completionHandler.accept(originalThread, null); + } + assert counter.get() >= 0; continue; } String pipelineId = indexRequest.getPipeline(); - if (NOOP_PIPELINE_NAME.equals(pipelineId) == false) { - try { - PipelineHolder holder = pipelines.get(pipelineId); - if (holder == null) { - throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist"); - } - Pipeline pipeline = holder.pipeline; - innerExecute(indexRequest, pipeline, itemDroppedHandler); - //this shouldn't be needed here but we do it for consistency with index api - // which requires it to prevent double execution - indexRequest.setPipeline(NOOP_PIPELINE_NAME); - } catch (Exception e) { - itemFailureHandler.accept(indexRequest, e); + if (NOOP_PIPELINE_NAME.equals(pipelineId)) { + if (counter.decrementAndGet() == 0){ + completionHandler.accept(originalThread, null); } + assert counter.get() >= 0; + continue; } + + final int slot = i; + 
try { + PipelineHolder holder = pipelines.get(pipelineId); + if (holder == null) { + throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist"); + } + Pipeline pipeline = holder.pipeline; + innerExecute(slot, indexRequest, pipeline, itemDroppedHandler, e -> { + if (e == null) { + // this shouldn't be needed here but we do it for consistency with index api + // which requires it to prevent double execution + indexRequest.setPipeline(NOOP_PIPELINE_NAME); + } else { + itemFailureHandler.accept(slot, e); + } + + if (counter.decrementAndGet() == 0){ + completionHandler.accept(originalThread, null); + } + assert counter.get() >= 0; + }); + } catch (Exception e) { + itemFailureHandler.accept(slot, e); + if (counter.decrementAndGet() == 0){ + completionHandler.accept(originalThread, null); + } + assert counter.get() >= 0; + } + i++; } - completionHandler.accept(null); } }); } @@ -420,26 +452,34 @@ public class IngestService implements ClusterStateApplier { return sb.toString(); } - private void innerExecute(IndexRequest indexRequest, Pipeline pipeline, Consumer itemDroppedHandler) throws Exception { + private void innerExecute(int slot, IndexRequest indexRequest, Pipeline pipeline, IntConsumer itemDroppedHandler, + Consumer handler) { if (pipeline.getProcessors().isEmpty()) { + handler.accept(null); return; } long startTimeInNanos = System.nanoTime(); // the pipeline specific stat holder may not exist and that is fine: // (e.g. the pipeline may have been removed while we're ingesting a document - try { - totalMetrics.preIngest(); - String index = indexRequest.index(); - String type = indexRequest.type(); - String id = indexRequest.id(); - String routing = indexRequest.routing(); - Long version = indexRequest.version(); - VersionType versionType = indexRequest.versionType(); - Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, version, versionType, sourceAsMap); - if (pipeline.execute(ingestDocument) == null) { - itemDroppedHandler.accept(indexRequest); + totalMetrics.preIngest(); + String index = indexRequest.index(); + String type = indexRequest.type(); + String id = indexRequest.id(); + String routing = indexRequest.routing(); + Long version = indexRequest.version(); + VersionType versionType = indexRequest.versionType(); + Map sourceAsMap = indexRequest.sourceAsMap(); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, version, versionType, sourceAsMap); + pipeline.execute(ingestDocument, (result, e) -> { + long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos); + totalMetrics.postIngest(ingestTimeInMillis); + if (e != null) { + totalMetrics.ingestFailed(); + handler.accept(e); + } else if (result == null) { + itemDroppedHandler.accept(slot); + handler.accept(null); } else { Map metadataMap = ingestDocument.extractMetadata(); //it's fine to set all metadata fields all the time, as ingest document holds their starting values @@ -453,14 +493,9 @@ public class IngestService implements ClusterStateApplier { indexRequest.versionType(VersionType.fromString((String) metadataMap.get(IngestDocument.MetaData.VERSION_TYPE))); } indexRequest.source(ingestDocument.getSourceAndMetadata(), indexRequest.getContentType()); + handler.accept(null); } - } catch (Exception e) { - totalMetrics.ingestFailed(); - throw e; - } finally { - long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos); - 
totalMetrics.postIngest(ingestTimeInMillis); - } + }); } @Override diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index 21871338322..3d41d991f3e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -27,6 +27,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; import java.util.function.LongSupplier; import org.elasticsearch.script.ScriptService; @@ -93,18 +94,17 @@ public final class Pipeline { * If null is returned then this document will be dropped and not indexed, otherwise * this document will be kept and indexed. */ - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - long startTimeInNanos = relativeTimeProvider.getAsLong(); - try { - metrics.preIngest(); - return compoundProcessor.execute(ingestDocument); - } catch (Exception e) { - metrics.ingestFailed(); - throw e; - } finally { + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + final long startTimeInNanos = relativeTimeProvider.getAsLong(); + metrics.preIngest(); + compoundProcessor.execute(ingestDocument, (result, e) -> { long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeInNanos); metrics.postIngest(ingestTimeInMillis); - } + if (e != null) { + metrics.ingestFailed(); + } + handler.accept(result, e); + }); } /** diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java index b5794a3f768..f5e37a1c123 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineProcessor.java @@ -20,6 +20,7 @@ package org.elasticsearch.ingest; import java.util.Map; +import java.util.function.BiConsumer; public class PipelineProcessor extends AbstractProcessor { @@ -36,12 +37,19 @@ public class PipelineProcessor extends AbstractProcessor { } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + public void execute(IngestDocument ingestDocument, BiConsumer handler) { Pipeline pipeline = ingestService.getPipeline(pipelineName); - if (pipeline == null) { - throw new IllegalStateException("Pipeline processor configured for non-existent pipeline [" + pipelineName + ']'); + if (pipeline != null) { + ingestDocument.executePipeline(pipeline, handler); + } else { + handler.accept(null, + new IllegalStateException("Pipeline processor configured for non-existent pipeline [" + pipelineName + ']')); } - return ingestDocument.executePipeline(pipeline); + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException("this method should not get executed"); } Pipeline getPipeline(){ diff --git a/server/src/main/java/org/elasticsearch/ingest/Processor.java b/server/src/main/java/org/elasticsearch/ingest/Processor.java index 10bd530e3c1..029e80234e9 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/server/src/main/java/org/elasticsearch/ingest/Processor.java @@ -27,6 +27,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.Scheduler; import java.util.Map; +import java.util.function.BiConsumer; import java.util.function.BiFunction; 
import java.util.function.LongSupplier; @@ -38,6 +39,21 @@ import java.util.function.LongSupplier; */ public interface Processor { + /** + * Introspect and potentially modify the incoming data. + * + * Expert method: only override this method if a processor implementation needs to make an asynchronous call, + * otherwise just overwrite {@link #execute(IngestDocument)}. + */ + default void execute(IngestDocument ingestDocument, BiConsumer handler) { + try { + IngestDocument result = execute(ingestDocument); + handler.accept(result, null); + } catch (Exception e) { + handler.accept(null, e); + } + } + /** * Introspect and potentially modify the incoming data. * diff --git a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java index 39ae9edcb2e..e9d4ea6b2ad 100644 --- a/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/TrackingResultProcessor.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.ingest.SimulateProcessorResult; import java.util.ArrayList; import java.util.List; +import java.util.function.BiConsumer; /** * Processor to be used within Simulate API to keep track of processors executed in pipeline. @@ -41,56 +42,76 @@ public final class TrackingResultProcessor implements Processor { } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - Processor processor = actualProcessor; - try { - if (processor instanceof ConditionalProcessor) { - ConditionalProcessor conditionalProcessor = (ConditionalProcessor) processor; - if (conditionalProcessor.evaluate(ingestDocument) == false) { - return ingestDocument; - } - if (conditionalProcessor.getInnerProcessor() instanceof PipelineProcessor) { - processor = conditionalProcessor.getInnerProcessor(); - } - } - if (processor instanceof PipelineProcessor) { - PipelineProcessor pipelineProcessor = ((PipelineProcessor) processor); - Pipeline pipeline = pipelineProcessor.getPipeline(); - //runtime check for cycles against a copy of the document. This is needed to properly handle conditionals around pipelines - try { - IngestDocument ingestDocumentCopy = new IngestDocument(ingestDocument); - ingestDocumentCopy.executePipeline(pipelineProcessor.getPipeline()); - } catch (ElasticsearchException elasticsearchException) { - if (elasticsearchException.getCause().getCause() instanceof IllegalStateException) { - throw elasticsearchException; - } + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + if (actualProcessor instanceof PipelineProcessor) { + PipelineProcessor pipelineProcessor = ((PipelineProcessor) actualProcessor); + Pipeline pipeline = pipelineProcessor.getPipeline(); + //runtime check for cycles against a copy of the document. 
This is needed to properly handle conditionals around pipelines + IngestDocument ingestDocumentCopy = new IngestDocument(ingestDocument); + ingestDocumentCopy.executePipeline(pipelineProcessor.getPipeline(), (result, e) -> { + // do nothing, let the tracking processors throw the exception while recording the path up to the failure + if (e instanceof ElasticsearchException) { + ElasticsearchException elasticsearchException = (ElasticsearchException) e; //else do nothing, let the tracking processors throw the exception while recording the path up to the failure - } catch (Exception e) { - // do nothing, let the tracking processors throw the exception while recording the path up to the failure + if (elasticsearchException.getCause().getCause() instanceof IllegalStateException) { + if (ignoreFailure) { + processorResultList.add(new SimulateProcessorResult(pipelineProcessor.getTag(), + new IngestDocument(ingestDocument), e)); + } else { + processorResultList.add(new SimulateProcessorResult(pipelineProcessor.getTag(), e)); + } + handler.accept(null, elasticsearchException); + } + } else { + //now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and execute it + CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); + Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), + verbosePipelineProcessor); + ingestDocument.executePipeline(verbosePipeline, handler); } - //now that we know that there are no cycles between pipelines, decorate the processors for this pipeline and execute it - CompoundProcessor verbosePipelineProcessor = decorate(pipeline.getCompoundProcessor(), processorResultList); - Pipeline verbosePipeline = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getVersion(), - verbosePipelineProcessor); - ingestDocument.executePipeline(verbosePipeline); + }); + return; + } + + final Processor processor; + if (actualProcessor instanceof ConditionalProcessor) { + ConditionalProcessor conditionalProcessor = (ConditionalProcessor) actualProcessor; + if (conditionalProcessor.evaluate(ingestDocument) == false) { + handler.accept(ingestDocument, null); + return; + } + if (conditionalProcessor.getInnerProcessor() instanceof PipelineProcessor) { + processor = conditionalProcessor.getInnerProcessor(); + } else { + processor = actualProcessor; + } + } else { + processor = actualProcessor; + } + + processor.execute(ingestDocument, (result, e) -> { + if (e != null) { + if (ignoreFailure) { + processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument), e)); + } else { + processorResultList.add(new SimulateProcessorResult(processor.getTag(), e)); + } + handler.accept(null, e); } else { - IngestDocument result = processor.execute(ingestDocument); if (result != null) { processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument))); + handler.accept(result, null); } else { processorResultList.add(new SimulateProcessorResult(processor.getTag())); - return null; + handler.accept(null, null); } } - } catch (Exception e) { - if (ignoreFailure) { - processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument), e)); - } else { - processorResultList.add(new SimulateProcessorResult(processor.getTag(), e)); - } - throw e; - } - return ingestDocument; + }); + } + + @Override + public IngestDocument 
execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException(); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java index 82ed5182561..8e6f3e606a3 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java @@ -55,7 +55,7 @@ public class BulkRequestModifierTests extends ESTestCase { while (bulkRequestModifier.hasNext()) { bulkRequestModifier.next(); if (randomBoolean()) { - bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException()); + bulkRequestModifier.markItemAsFailed(i, new RuntimeException()); failedSlots.add(i); } i++; @@ -93,7 +93,7 @@ public class BulkRequestModifierTests extends ESTestCase { for (int i = 0; modifier.hasNext(); i++) { modifier.next(); if (i % 2 == 0) { - modifier.markCurrentItemAsFailed(new RuntimeException()); + modifier.markItemAsFailed(i, new RuntimeException()); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 3f3e20d95d3..505c1d41ffc 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -67,11 +67,11 @@ import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; -import java.util.function.Consumer; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; @@ -93,6 +93,8 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Settings SETTINGS = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build(); + private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); + /** Services needed by bulk action */ TransportService transportService; ClusterService clusterService; @@ -101,9 +103,9 @@ public class TransportBulkActionIngestTests extends ESTestCase { /** Arguments to callbacks we want to capture, but which require generics, so we must use @Captor */ @Captor - ArgumentCaptor> failureHandler; + ArgumentCaptor> failureHandler; @Captor - ArgumentCaptor> completionHandler; + ArgumentCaptor> completionHandler; @Captor ArgumentCaptor> remoteResponseHandler; @Captor @@ -265,15 +267,16 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(action.isExecuted); // haven't executed yet assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); - completionHandler.getValue().accept(exception); + verify(ingestService).executeBulkRequest(eq(bulkRequest.numberOfActions()), bulkDocsItr.capture(), + failureHandler.capture(), completionHandler.capture(), any()); + completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); // now check success Iterator> req = 
bulkDocsItr.getValue().iterator(); - failureHandler.getValue().accept((IndexRequest)req.next(), exception); // have an exception for our one index request + failureHandler.getValue().accept(0, exception); // have an exception for our one index request indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing - completionHandler.getValue().accept(null); + completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one verifyZeroInteractions(transportService); @@ -299,13 +302,14 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(action.isExecuted); // haven't executed yet assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); - completionHandler.getValue().accept(exception); + verify(ingestService).executeBulkRequest(eq(1), bulkDocsItr.capture(), failureHandler.capture(), + completionHandler.capture(), any()); + completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); // now check success indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing - completionHandler.getValue().accept(null); + completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one verifyZeroInteractions(transportService); @@ -331,7 +335,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { action.execute(null, bulkRequest, listener); // should not have executed ingest locally - verify(ingestService, never()).executeBulkRequest(any(), any(), any(), any()); + verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); @@ -375,7 +379,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { singleItemBulkWriteAction.execute(null, indexRequest, listener); // should not have executed ingest locally - verify(ingestService, never()).executeBulkRequest(any(), any(), any(), any()); + verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); @@ -459,18 +463,19 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(action.isExecuted); // haven't executed yet assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + verify(ingestService).executeBulkRequest(eq(bulkRequest.numberOfActions()), bulkDocsItr.capture(), + failureHandler.capture(), completionHandler.capture(), any()); assertEquals(indexRequest1.getPipeline(), "default_pipeline"); 
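// ---------------------------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the completion handler's type has
// changed from Consumer<Exception> to BiConsumer<Thread, Exception>. The first argument is the
// thread that originally invoked executeBulkRequest; comparing it with the thread the callback
// runs on tells the caller whether any processor went async. Roughly, a caller might use it like
// this (threadPool, bulkRequest, listener and doInternalExecute are assumed names):
BiConsumer<Thread, Exception> completionHandler = (originalThread, exception) -> {
    if (exception != null) {
        listener.onFailure(exception);                     // the whole bulk request failed
    } else if (originalThread == Thread.currentThread()) {
        doInternalExecute(bulkRequest);                    // still synchronous: continue inline
    } else {
        // an async processor completed on another thread; fork back onto the WRITE pool
        threadPool.executor(ThreadPool.Names.WRITE).execute(() -> doInternalExecute(bulkRequest));
    }
};
// This appears to be why these tests pass DUMMY_WRITE_THREAD: a stand-in "other" thread that
// drives the completion callback deterministically, without real concurrency.
// ---------------------------------------------------------------------------------------------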
assertEquals(indexRequest2.getPipeline(), "default_pipeline"); assertEquals(indexRequest3.getPipeline(), "default_pipeline"); - completionHandler.getValue().accept(exception); + completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); // now check success of the transport bulk action indexRequest1.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing indexRequest3.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing - completionHandler.getValue().accept(null); + completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one verifyZeroInteractions(transportService); @@ -497,14 +502,15 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(action.indexCreated); // no index yet assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); - completionHandler.getValue().accept(exception); + verify(ingestService).executeBulkRequest(eq(1), bulkDocsItr.capture(), failureHandler.capture(), + completionHandler.capture(), any()); + completionHandler.getValue().accept(null, exception); assertFalse(action.indexCreated); // still no index yet, the ingest node failed. assertTrue(failureCalled.get()); // now check success indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing - completionHandler.getValue().accept(null); + completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); assertTrue(action.indexCreated); // now the index is created since we skipped the ingest node path. 
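// ---------------------------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): item failures are now reported by
// slot (the item's position in the bulk request) instead of by IndexRequest reference, matching
// the markItemAsFailed(i, e) change in BulkRequestModifierTests above. Wiring is as simple as:
BiConsumer<Integer, Exception> failureHandler =
    (slot, e) -> bulkRequestModifier.markItemAsFailed(slot, e);   // look the item up by index
// A slot index survives the asynchronous hand-off unambiguously, whereas object identity would
// be fragile if requests were copied or rewritten while a pipeline runs.
// ---------------------------------------------------------------------------------------------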
assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one @@ -561,7 +567,8 @@ public class TransportBulkActionIngestTests extends ESTestCase { })); assertEquals("pipeline2", indexRequest.getPipeline()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + verify(ingestService).executeBulkRequest(eq(1), bulkDocsItr.capture(), failureHandler.capture(), + completionHandler.capture(), any()); } private void validateDefaultPipeline(IndexRequest indexRequest) { @@ -583,14 +590,15 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(action.isExecuted); // haven't executed yet assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); - verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + verify(ingestService).executeBulkRequest(eq(1), bulkDocsItr.capture(), failureHandler.capture(), + completionHandler.capture(), any()); assertEquals(indexRequest.getPipeline(), "default_pipeline"); - completionHandler.getValue().accept(exception); + completionHandler.getValue().accept(null, exception); assertTrue(failureCalled.get()); // now check success indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing - completionHandler.getValue().accept(null); + completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); assertTrue(action.isExecuted); assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one verifyZeroInteractions(transportService); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java b/server/src/test/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java new file mode 100644 index 00000000000..1d398738db1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/AsyncIngestProcessorIT.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
+import org.elasticsearch.ingest.AbstractProcessor;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.plugins.IngestPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.watcher.ResourceWatcherService;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * The purpose of this test is to verify that when a processor executes an operation asynchronously,
+ * the result is the same as if the operation had executed synchronously.
+ *
+ * Two test processors are defined that perform the same kind of operation, but one of them executes
+ * asynchronously. The result of the operation should be the same, and the bulk responses should be
+ * returned in the same order in which the corresponding index requests were defined.
+ */
+public class AsyncIngestProcessorIT extends ESSingleNodeTestCase {
+
+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return Collections.singleton(TestPlugin.class);
+    }
+
+    public void testAsyncProcessorImplementation() {
+        // A pipeline with 2 processors: the test async processor and the sync test processor.
+        BytesReference pipelineBody = new BytesArray("{\"processors\": [{\"test-async\": {}, \"test\": {}}]}");
+        client().admin().cluster().putPipeline(new PutPipelineRequest("_id", pipelineBody, XContentType.JSON)).actionGet();
+
+        BulkRequest bulkRequest = new BulkRequest();
+        int numDocs = randomIntBetween(8, 256);
+        for (int i = 0; i < numDocs; i++) {
+            bulkRequest.add(new IndexRequest("foobar")
+                .id(Integer.toString(i))
+                .source("{}", XContentType.JSON)
+                .setPipeline("_id")
+            );
+        }
+        BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet();
+        assertThat(bulkResponse.getItems().length, equalTo(numDocs));
+        for (int i = 0; i < numDocs; i++) {
+            String id = Integer.toString(i);
+            assertThat(bulkResponse.getItems()[i].getId(), equalTo(id));
+            GetResponse getResponse = client().get(new GetRequest("foobar", id)).actionGet();
+            // The expected result of the async test processor:
+            assertThat(getResponse.getSource().get("foo"), equalTo("bar-" + id));
+            // The expected result of the sync test processor:
+            assertThat(getResponse.getSource().get("bar"), equalTo("baz-" + id));
+        }
+    }
+
+    public static class TestPlugin extends Plugin implements IngestPlugin {
+
+        private ThreadPool threadPool;
+
+        @Override
+        public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
+                                                   ResourceWatcherService resourceWatcherService, ScriptService scriptService,
+                                                   NamedXContentRegistry xContentRegistry, Environment environment,
+                                                   NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
+            this.threadPool = threadPool;
+            return Collections.emptyList();
+        }
+
+        @Override
+        public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
+            Map<String, Processor.Factory> processors = new HashMap<>();
+            processors.put("test-async", (factories, tag, config) -> {
+                return new AbstractProcessor(tag) {
+
+                    @Override
+                    public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
+                        threadPool.generic().execute(() -> {
+                            String id = (String) ingestDocument.getSourceAndMetadata().get("_id");
+                            if (usually()) {
+                                try {
+                                    Thread.sleep(10);
+                                } catch (InterruptedException e) {
+                                    // ignore
+                                }
+                            }
+                            ingestDocument.setFieldValue("foo", "bar-" + id);
+                            handler.accept(ingestDocument, null);
+                        });
+                    }
+
+                    @Override
+                    public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
+                        throw new UnsupportedOperationException();
+                    }
+
+                    @Override
+                    public String getType() {
+                        return "test-async";
+                    }
+                };
+            });
+            processors.put("test", (processorFactories, tag, config) -> {
+                return new AbstractProcessor(tag) {
+                    @Override
+                    public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
+                        String id = (String) ingestDocument.getSourceAndMetadata().get("_id");
+                        ingestDocument.setFieldValue("bar", "baz-" + id);
+                        return ingestDocument;
+                    }
+
+                    @Override
+                    public String getType() {
+                        return "test";
+                    }
+                };
+            });
+            return processors;
+        }
+    }
+
+}
diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java
index 115fd2ba1d4..55f32c23b97 100644
--- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java
@@ -34,6 +34,8 @@ import org.junit.Before;
 
 import java.util.Collections;
 import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicReference;
 
 import static
org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; @@ -66,7 +68,15 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteVerboseItem() throws Exception { TestProcessor processor = new TestProcessor("test-id", "mock", ingestDocument -> {}); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor, processor)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor.getInvokedCounter(), equalTo(2)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -91,7 +101,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteItem() throws Exception { TestProcessor processor = new TestProcessor("processor_0", "mock", ingestDocument -> {}); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor, processor)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, false, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor.getInvokedCounter(), equalTo(2)); assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class)); SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse; @@ -104,7 +121,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { TestProcessor processor2 = new TestProcessor("processor_1", "mock", new RuntimeException("processor failed")); TestProcessor processor3 = new TestProcessor("processor_2", "mock", ingestDocument -> {}); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor1, processor2, processor3)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(processor2.getInvokedCounter(), equalTo(1)); assertThat(processor3.getInvokedCounter(), equalTo(0)); @@ -131,7 +155,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(new CompoundProcessor(false, Collections.singletonList(processor1), Collections.singletonList(processor2)), processor3)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + 
AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(processor2.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); @@ -166,7 +197,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { TestProcessor testProcessor = new TestProcessor("processor_0", "mock", exception); CompoundProcessor processor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -183,7 +221,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { TestProcessor testProcessor = new TestProcessor("processor_0", "mock", ingestDocument -> { }); CompoundProcessor processor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -199,7 +244,14 @@ public class SimulateExecutionServiceTests extends ESTestCase { public void testExecuteItemWithFailure() throws Exception { TestProcessor processor = new TestProcessor(ingestDocument -> { throw new RuntimeException("processor failed"); }); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor, processor)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, false, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class)); SimulateDocumentBaseResult simulateDocumentBaseResult = 
(SimulateDocumentBaseResult) actualItemResponse; @@ -210,12 +262,19 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(exception.getMessage(), equalTo("java.lang.IllegalArgumentException: java.lang.RuntimeException: processor failed")); } - public void testDropDocument() { + public void testDropDocument() throws Exception { TestProcessor processor1 = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("field", "value")); Processor processor2 = new DropProcessor.Factory().create(Collections.emptyMap(), null, Collections.emptyMap()); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor1, processor2)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, false, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class)); SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse; @@ -223,12 +282,19 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); } - public void testDropDocumentVerbose() { + public void testDropDocumentVerbose() throws Exception { TestProcessor processor1 = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("field", "value")); Processor processor2 = new DropProcessor.Factory().create(Collections.emptyMap(), null, Collections.emptyMap()); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor1, processor2)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult) actualItemResponse; @@ -239,13 +305,20 @@ public class SimulateExecutionServiceTests extends ESTestCase { assertThat(verboseResult.getProcessorResults().get(1).getFailure(), nullValue()); } - public void testDropDocumentVerboseExtraProcessor() { + public void testDropDocumentVerboseExtraProcessor() throws Exception { TestProcessor processor1 = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("field1", "value")); Processor processor2 = new DropProcessor.Factory().create(Collections.emptyMap(), null, Collections.emptyMap()); TestProcessor processor3 = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("field2", "value")); Pipeline pipeline = new Pipeline("_id", "_description", version, new CompoundProcessor(processor1, processor2, processor3)); - SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + CountDownLatch latch = new CountDownLatch(1); + AtomicReference holder = new 
AtomicReference<>(); + executionService.executeDocument(pipeline, ingestDocument, true, (r, e) -> { + holder.set(r); + latch.countDown(); + }); + latch.await(); + SimulateDocumentResult actualItemResponse = holder.get(); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertThat(processor3.getInvokedCounter(), equalTo(0)); assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); diff --git a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java index 575d5629b1a..b3b8ee9762d 100644 --- a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java @@ -34,6 +34,7 @@ import java.util.function.LongSupplier; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -52,7 +53,7 @@ public class CompoundProcessorTests extends ESTestCase { CompoundProcessor processor = new CompoundProcessor(); assertThat(processor.getProcessors().isEmpty(), is(true)); assertThat(processor.getOnFailureProcessors().isEmpty(), is(true)); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); } public void testSingleProcessor() throws Exception { @@ -67,7 +68,7 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); verify(relativeTimeProvider, times(2)).getAsLong(); assertThat(processor.getInvokedCounter(), equalTo(1)); assertStats(compoundProcessor, 1, 0, 1); @@ -82,12 +83,9 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(compoundProcessor.getProcessors().size(), equalTo(1)); assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor)); assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true)); - try { - compoundProcessor.execute(ingestDocument); - fail("should throw exception"); - } catch (ElasticsearchException e) { - assertThat(e.getRootCause().getMessage(), equalTo("error")); - } + Exception[] holder = new Exception[1]; + compoundProcessor.execute(ingestDocument, (result, e) -> holder[0] = e); + assertThat(((ElasticsearchException) holder[0]).getRootCause().getMessage(), equalTo("error")); assertThat(processor.getInvokedCounter(), equalTo(1)); assertStats(compoundProcessor, 1, 1, 0); @@ -100,7 +98,7 @@ public class CompoundProcessorTests extends ESTestCase { when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor compoundProcessor = new CompoundProcessor(true, Arrays.asList(processor1, processor2), Collections.emptyList(), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertStats(0, compoundProcessor, 0, 1, 1, 0); assertThat(processor2.getInvokedCounter(), equalTo(1)); @@ -122,7 +120,7 @@ public class CompoundProcessorTests extends ESTestCase { 
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1)); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1), Collections.singletonList(processor2), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); verify(relativeTimeProvider, times(2)).getAsLong(); assertThat(processor1.getInvokedCounter(), equalTo(1)); @@ -154,7 +152,9 @@ public class CompoundProcessorTests extends ESTestCase { when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1), Collections.singletonList(processor2), relativeTimeProvider); - assertNull(compoundProcessor.execute(ingestDocument)); + IngestDocument[] result = new IngestDocument[1]; + compoundProcessor.execute(ingestDocument, (r, e) -> result[0] = r); + assertThat(result[0], nullValue()); assertThat(processor1.getInvokedCounter(), equalTo(1)); assertStats(compoundProcessor, 1, 1, 0); } @@ -182,7 +182,7 @@ public class CompoundProcessorTests extends ESTestCase { Collections.singletonList(lastProcessor), relativeTimeProvider); CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor), Collections.singletonList(compoundOnFailProcessor), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); assertThat(processorToFail.getInvokedCounter(), equalTo(1)); assertThat(lastProcessor.getInvokedCounter(), equalTo(1)); @@ -205,7 +205,7 @@ public class CompoundProcessorTests extends ESTestCase { CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), Collections.singletonList(secondProcessor), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); @@ -231,7 +231,7 @@ public class CompoundProcessorTests extends ESTestCase { CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), Collections.singletonList(secondProcessor), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); @@ -257,7 +257,7 @@ public class CompoundProcessorTests extends ESTestCase { CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(failCompoundProcessor), Collections.singletonList(secondProcessor), relativeTimeProvider); - compoundProcessor.execute(ingestDocument); + compoundProcessor.execute(ingestDocument, (result, e) -> {}); assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); @@ -272,7 +272,7 @@ public class CompoundProcessorTests extends ESTestCase { when(relativeTimeProvider.getAsLong()).thenReturn(0L); CompoundProcessor pipeline = new CompoundProcessor(false, Arrays.asList(firstProcessor, secondProcessor), Collections.singletonList(onFailureProcessor), relativeTimeProvider); - pipeline.execute(ingestDocument); + pipeline.execute(ingestDocument, (result, e) -> {}); 
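// ---------------------------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): these tests repeat two idioms for
// turning the callback API back into something assertable. For processors that complete
// synchronously, a one-element array captures the handler's output (the array reference stays
// effectively final, so the lambda may write into it). For potentially asynchronous execution,
// SimulateExecutionServiceTests above pairs an AtomicReference with a CountDownLatch; that
// boilerplate could be factored into a helper along these lines (hypothetical, not in the patch):
static SimulateDocumentResult executeDocumentSync(SimulateExecutionService service, Pipeline pipeline,
                                                  IngestDocument doc, boolean verbose) throws InterruptedException {
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<SimulateDocumentResult> holder = new AtomicReference<>();
    service.executeDocument(pipeline, doc, verbose, (r, e) -> {
        holder.set(r);
        latch.countDown();
    });
    latch.await();       // block until the (possibly forked) pipeline reports back
    return holder.get();
}
// ---------------------------------------------------------------------------------------------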
assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); assertThat(secondProcessor.getInvokedCounter(), equalTo(0)); assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java index f484957d897..204a0bddbc3 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConditionalProcessorTests.java @@ -98,7 +98,7 @@ public class ConditionalProcessorTests extends ESTestCase { String falseValue = "falsy"; IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, falseValue); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(falseValue)); assertThat(ingestDocument.getSourceAndMetadata(), not(hasKey("foo"))); assertStats(processor, 0, 0, 0); @@ -106,13 +106,13 @@ public class ConditionalProcessorTests extends ESTestCase { ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, falseValue); ingestDocument.setFieldValue("error", true); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertStats(processor, 0, 0, 0); //true, always call processor and increments metrics ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue(conditionalField, trueValue); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertThat(ingestDocument.getSourceAndMetadata().get(conditionalField), is(trueValue)); assertThat(ingestDocument.getSourceAndMetadata().get("foo"), is("bar")); assertStats(processor, 1, 0, 1); @@ -121,7 +121,9 @@ public class ConditionalProcessorTests extends ESTestCase { ingestDocument.setFieldValue(conditionalField, trueValue); ingestDocument.setFieldValue("error", true); IngestDocument finalIngestDocument = ingestDocument; - expectThrows(RuntimeException.class, () -> processor.execute(finalIngestDocument)); + Exception holder[] = new Exception[1]; + processor.execute(finalIngestDocument, (result, e) -> {holder[0] = e;}); + assertThat(holder[0], instanceOf(RuntimeException.class)); assertStats(processor, 2, 1, 2); } @@ -177,7 +179,7 @@ public class ConditionalProcessorTests extends ESTestCase { }, relativeTimeProvider); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); } @@ -213,7 +215,7 @@ public class ConditionalProcessorTests extends ESTestCase { ); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); ingestDocument.setFieldValue("listField", new ArrayList<>()); - processor.execute(ingestDocument); + processor.execute(ingestDocument, (result, e) -> {}); Exception e = expectedException.get(); assertThat(e, instanceOf(UnsupportedOperationException.class)); assertEquals("Mutating ingest documents in conditionals is not supported", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java 
b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 5abf87b7f80..555d71c2922 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -73,6 +73,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.IntConsumer; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -86,6 +87,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.argThat; import static org.mockito.Matchers.eq; @@ -135,20 +137,20 @@ public class IngestServiceTests extends ESTestCase { final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); final SetOnce failure = new SetOnce<>(); - final BiConsumer failureHandler = (request, e) -> { + final BiConsumer failureHandler = (slot, e) -> { failure.set(true); - assertThat(request, sameInstance(indexRequest)); + assertThat(slot, equalTo(0)); assertThat(e, instanceOf(IllegalArgumentException.class)); assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist")); }; @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); + final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); assertTrue(failure.get()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testUpdatePipelines() { @@ -641,7 +643,7 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final SetOnce failure = new SetOnce<>(); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline(id); - final BiConsumer failureHandler = (request, e) -> { + final BiConsumer failureHandler = (slot, e) -> { assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(e.getCause().getCause(), instanceOf(IllegalStateException.class)); assertThat(e.getCause().getCause().getMessage(), equalTo("error")); @@ -649,17 +651,17 @@ public class IngestServiceTests extends ESTestCase { }; @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); + final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); assertTrue(failure.get()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testExecuteBulkPipelineDoesNotExist() { IngestService ingestService = createWithProcessors(Collections.singletonMap( - 
"mock", (factories, tag, config) -> mock(CompoundProcessor.class))); + "mock", (factories, tag, config) -> mockCompoundProcessor())); PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), XContentType.JSON); @@ -676,15 +678,16 @@ public class IngestServiceTests extends ESTestCase { new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist"); bulkRequest.add(indexRequest2); @SuppressWarnings("unchecked") - BiConsumer failureHandler = mock(BiConsumer.class); + BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(bulkRequest.requests(), failureHandler, completionHandler, indexReq -> {}); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(bulkRequest.numberOfActions(), bulkRequest.requests(), failureHandler, + completionHandler, indexReq -> {}); verify(failureHandler, times(1)).accept( - argThat(new CustomTypeSafeMatcher("failure handler was not called with the expected arguments") { + argThat(new CustomTypeSafeMatcher("failure handler was not called with the expected arguments") { @Override - protected boolean matchesSafely(IndexRequest item) { - return item == indexRequest2; + protected boolean matchesSafely(Integer item) { + return item == 1; } }), @@ -695,12 +698,12 @@ public class IngestServiceTests extends ESTestCase { } }) ); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testExecuteSuccess() { IngestService ingestService = createWithProcessors(Collections.singletonMap( - "mock", (factories, tag, config) -> mock(CompoundProcessor.class))); + "mock", (factories, tag, config) -> mockCompoundProcessor())); PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), XContentType.JSON); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty @@ -709,12 +712,12 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); verify(failureHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testExecuteEmptyPipeline() throws Exception { @@ -727,16 +730,16 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") - 
final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); verify(failureHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testExecutePropagateAllMetaDataUpdates() throws Exception { - final CompoundProcessor processor = mock(CompoundProcessor.class); + final CompoundProcessor processor = mockCompoundProcessor(); IngestService ingestService = createWithProcessors(Collections.singletonMap( "mock", (factories, tag, config) -> processor)); PutPipelineRequest putRequest = new PutPipelineRequest("_id", @@ -758,17 +761,21 @@ public class IngestServiceTests extends ESTestCase { ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); } } - return ingestDocument; - }).when(processor).execute(any()); + + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) invocationOnMock.getArguments()[1]; + handler.accept(ingestDocument, null); + return null; + }).when(processor).execute(any(), any()); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); - verify(processor).execute(any()); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + verify(processor).execute(any(), any()); verify(failureHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); assertThat(indexRequest.index(), equalTo("update_index")); assertThat(indexRequest.type(), equalTo("update_type")); assertThat(indexRequest.id(), equalTo("update_id")); @@ -778,7 +785,7 @@ public class IngestServiceTests extends ESTestCase { } public void testExecuteFailure() throws Exception { - final CompoundProcessor processor = mock(CompoundProcessor.class); + final CompoundProcessor processor = mockCompoundProcessor(); IngestService ingestService = createWithProcessors(Collections.singletonMap( "mock", (factories, tag, config) -> processor)); PutPipelineRequest putRequest = new PutPipelineRequest("_id", @@ -790,22 +797,37 @@ public class IngestServiceTests extends ESTestCase { final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()) .when(processor) - .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); 
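// ---------------------------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the contract these stubs and
// assertions exercise is that execute(document, handler) invokes the handler exactly once:
//   handler.accept(document, null)   on success,
//   handler.accept(null, exception)  on failure,
//   handler.accept(null, null)       when the document is dropped (e.g. by the drop processor).
// A minimal processor conforming to that contract, under those assumptions:
Processor sketch = new AbstractProcessor("sketch-tag") {
    @Override
    public void execute(IngestDocument document, BiConsumer<IngestDocument, Exception> handler) {
        try {
            document.setFieldValue("touched", true);
            handler.accept(document, null);          // success: hand the document back
        } catch (Exception e) {
            handler.accept(null, e);                 // failure: report instead of throwing
        }
    }

    @Override
    public IngestDocument execute(IngestDocument document) {
        throw new UnsupportedOperationException();   // only the callback variant is supported
    }

    @Override
    public String getType() {
        return "sketch";
    }
};
// ---------------------------------------------------------------------------------------------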
@SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); - verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); - verify(failureHandler, times(1)).accept(eq(indexRequest), any(RuntimeException.class)); - verify(completionHandler, times(1)).accept(null); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); + verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testExecuteSuccessWithOnFailure() throws Exception { final Processor processor = mock(Processor.class); when(processor.getType()).thenReturn("mock_processor_type"); when(processor.getTag()).thenReturn("mock_processor_tag"); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(null, new RuntimeException()); + return null; + }).when(processor).execute(eqIndexTypeId(emptyMap()), any()); + final Processor onFailureProcessor = mock(Processor.class); + doAnswer(args -> { + IngestDocument ingestDocument = (IngestDocument) args.getArguments()[0]; + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(ingestDocument, null); + return null; + }).when(onFailureProcessor).execute(eqIndexTypeId(emptyMap()), any()); + final CompoundProcessor compoundProcessor = new CompoundProcessor( false, Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); IngestService ingestService = createWithProcessors(Collections.singletonMap( @@ -817,14 +839,13 @@ public class IngestServiceTests extends ESTestCase { clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); - doThrow(new RuntimeException()).when(processor).execute(eqIndexTypeId(emptyMap())); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); - verify(failureHandler, never()).accept(eq(indexRequest), any(ElasticsearchException.class)); - verify(completionHandler, times(1)).accept(null); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + verify(failureHandler, never()).accept(eq(0), any(ElasticsearchException.class)); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void 
testExecuteFailureWithNestedOnFailure() throws Exception { @@ -848,21 +869,21 @@ public class IngestServiceTests extends ESTestCase { final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); doThrow(new RuntimeException()) .when(onFailureOnFailureProcessor) - .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); doThrow(new RuntimeException()) .when(onFailureProcessor) - .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); doThrow(new RuntimeException()) .when(processor) - .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); - verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap())); - verify(failureHandler, times(1)).accept(eq(indexRequest), any(RuntimeException.class)); - verify(completionHandler, times(1)).accept(null); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), emptyMap()), any()); + verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testBulkRequestExecutionWithFailures() throws Exception { @@ -891,7 +912,12 @@ public class IngestServiceTests extends ESTestCase { CompoundProcessor processor = mock(CompoundProcessor.class); when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class))); Exception error = new RuntimeException(); - doThrow(error).when(processor).execute(any()); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(null, error); + return null; + }).when(processor).execute(any(), any()); IngestService ingestService = createWithProcessors(Collections.singletonMap( "mock", (factories, tag, config) -> processor)); PutPipelineRequest putRequest = new PutPipelineRequest("_id", @@ -902,18 +928,18 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @SuppressWarnings("unchecked") - BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + BiConsumer requestItemErrorHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler, indexReq -> {}); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(numRequest, bulkRequest.requests(), requestItemErrorHandler, 
completionHandler, indexReq -> {}); - verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), argThat(new ArgumentMatcher() { + verify(requestItemErrorHandler, times(numIndexRequests)).accept(anyInt(), argThat(new ArgumentMatcher() { @Override public boolean matches(final Object o) { return ((Exception)o).getCause().getCause().equals(error); } })); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } public void testBulkRequestExecution() throws Exception { @@ -936,7 +962,12 @@ public class IngestServiceTests extends ESTestCase { final Processor processor = mock(Processor.class); when(processor.getType()).thenReturn("mock"); when(processor.getTag()).thenReturn("mockTag"); - when(processor.execute(any(IngestDocument.class))).thenReturn( RandomDocumentPicks.randomIngestDocument(random())); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(RandomDocumentPicks.randomIngestDocument(random()), null); + return null; + }).when(processor).execute(any(), any()); Map map = new HashMap<>(2); map.put("mock", (factories, tag, config) -> processor); @@ -949,13 +980,13 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @SuppressWarnings("unchecked") - BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + BiConsumer requestItemErrorHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - ingestService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler, indexReq -> {}); + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest(numRequest, bulkRequest.requests(), requestItemErrorHandler, completionHandler, indexReq -> {}); verify(requestItemErrorHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(null); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); for (DocWriteRequest docWriteRequest : bulkRequest.requests()) { IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(docWriteRequest); assertThat(indexRequest, notNullValue()); @@ -970,8 +1001,18 @@ public class IngestServiceTests extends ESTestCase { when(processor.getTag()).thenReturn("mockTag"); when(processorFailure.getType()).thenReturn("failure-mock"); //avoid returning null and dropping the document - when(processor.execute(any(IngestDocument.class))).thenReturn( RandomDocumentPicks.randomIngestDocument(random())); - when(processorFailure.execute(any(IngestDocument.class))).thenThrow(new RuntimeException("error")); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(RandomDocumentPicks.randomIngestDocument(random()), null); + return null; + }).when(processor).execute(any(IngestDocument.class), any()); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(null, new RuntimeException("error")); + return null; + }).when(processorFailure).execute(any(IngestDocument.class), any()); Map map = new HashMap<>(2); map.put("mock", (factories, tag, config) -> processor); map.put("failure-mock", (factories, tag, config) -> processorFailure); @@ -993,13 +1034,13 @@ public class IngestServiceTests extends 
ESTestCase { clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - @SuppressWarnings("unchecked") final BiConsumer failureHandler = mock(BiConsumer.class); - @SuppressWarnings("unchecked") final Consumer completionHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") final BiConsumer failureHandler = mock(BiConsumer.class); + @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); final IndexRequest indexRequest = new IndexRequest("_index"); indexRequest.setPipeline("_id1"); indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10)); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.getPipelineStats().size(), equalTo(2)); @@ -1017,7 +1058,7 @@ public class IngestServiceTests extends ESTestCase { indexRequest.setPipeline("_id2"); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.getPipelineStats().size(), equalTo(2)); //total @@ -1036,7 +1077,7 @@ public class IngestServiceTests extends ESTestCase { clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); indexRequest.setPipeline("_id1"); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.getPipelineStats().size(), equalTo(2)); //total @@ -1060,7 +1101,7 @@ public class IngestServiceTests extends ESTestCase { clusterState = IngestService.innerPut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); indexRequest.setPipeline("_id1"); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {}); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.getPipelineStats().size(), equalTo(2)); //total @@ -1126,15 +1167,15 @@ public class IngestServiceTests extends ESTestCase { ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); @SuppressWarnings("unchecked") - final BiConsumer failureHandler = mock(BiConsumer.class); + final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer completionHandler = mock(Consumer.class); + final BiConsumer completionHandler = 
mock(BiConsumer.class); @SuppressWarnings("unchecked") - final Consumer dropHandler = mock(Consumer.class); - ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, dropHandler); + final IntConsumer dropHandler = mock(IntConsumer.class); + ingestService.executeBulkRequest(1, Collections.singletonList(indexRequest), failureHandler, completionHandler, dropHandler); verify(failureHandler, never()).accept(any(), any()); - verify(completionHandler, times(1)).accept(null); - verify(dropHandler, times(1)).accept(indexRequest); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); + verify(dropHandler, times(1)).accept(0); } public void testIngestClusterStateListeners_orderOfExecution() { @@ -1176,7 +1217,7 @@ public class IngestServiceTests extends ESTestCase { } private IngestDocument eqIndexTypeId(final Map source) { - return argThat(new IngestDocumentMatcher("_index", "_type", "_id", source)); + return argThat(new IngestDocumentMatcher("_index", "_type", "_id", -3L, VersionType.INTERNAL, source)); } private IngestDocument eqIndexTypeId(final Long version, final VersionType versionType, final Map source) { @@ -1212,6 +1253,17 @@ public class IngestServiceTests extends ESTestCase { }), client); } + private CompoundProcessor mockCompoundProcessor() { + CompoundProcessor processor = mock(CompoundProcessor.class); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept((IngestDocument) args.getArguments()[0], null); + return null; + }).when(processor).execute(any(), any()); + return processor; + } + private class IngestDocumentMatcher extends ArgumentMatcher { private final IngestDocument ingestDocument; diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java index 0ad88c05ccc..4f36727c7ac 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineProcessorTests.java @@ -64,7 +64,7 @@ public class PipelineProcessorTests extends ESTestCase { PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); Map config = new HashMap<>(); config.put("name", pipelineId); - factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument); + factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument, (result, e) -> {}); assertEquals(testIngestDocument, invoked.get()); } @@ -74,12 +74,11 @@ public class PipelineProcessorTests extends ESTestCase { PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService); Map config = new HashMap<>(); config.put("name", "missingPipelineId"); - IllegalStateException e = expectThrows( - IllegalStateException.class, - () -> factory.create(Collections.emptyMap(), null, config).execute(testIngestDocument) - ); + IllegalStateException[] e = new IllegalStateException[1]; + factory.create(Collections.emptyMap(), null, config) + .execute(testIngestDocument, (result, e1) -> e[0] = (IllegalStateException) e1); assertEquals( - "Pipeline processor configured for non-existent pipeline [missingPipelineId]", e.getMessage() + "Pipeline processor configured for non-existent pipeline [missingPipelineId]", e[0].getMessage() ); } @@ -104,12 +103,11 @@ public class PipelineProcessorTests extends ESTestCase { when(ingestService.getPipeline(outerPipelineId)).thenReturn(outer); 
when(ingestService.getPipeline(innerPipelineId)).thenReturn(inner); outerConfig.put("name", innerPipelineId); - ElasticsearchException e = expectThrows( - ElasticsearchException.class, - () -> factory.create(Collections.emptyMap(), null, outerConfig).execute(testIngestDocument) - ); + ElasticsearchException[] e = new ElasticsearchException[1]; + factory.create(Collections.emptyMap(), null, outerConfig) + .execute(testIngestDocument, (result, e1) -> e[0] = (ElasticsearchException) e1); assertEquals( - "Cycle detected for pipeline: inner", e.getRootCause().getMessage() + "Cycle detected for pipeline: inner", e[0].getRootCause().getMessage() ); } @@ -125,8 +123,8 @@ public class PipelineProcessorTests extends ESTestCase { ); when(ingestService.getPipeline(innerPipelineId)).thenReturn(inner); Processor outerProc = factory.create(Collections.emptyMap(), null, outerConfig); - outerProc.execute(testIngestDocument); - outerProc.execute(testIngestDocument); + outerProc.execute(testIngestDocument, (result, e) -> {}); + outerProc.execute(testIngestDocument, (result, e) -> {}); } public void testPipelineProcessorWithPipelineChain() throws Exception { @@ -177,7 +175,7 @@ public class PipelineProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); //start the chain - ingestDocument.executePipeline(pipeline1); + ingestDocument.executePipeline(pipeline1, (result, e) -> {}); assertNotNull(ingestDocument.getSourceAndMetadata().get(key1)); //check the stats diff --git a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java index 53a5cd1d753..b7afb13deaf 100644 --- a/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/TrackingResultProcessorTests.java @@ -66,7 +66,7 @@ public class TrackingResultProcessorTests extends ESTestCase { public void testActualProcessor() throws Exception { TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {}); TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); @@ -84,12 +84,9 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor actualProcessor = new CompoundProcessor(testProcessor); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - try { - trackingProcessor.execute(ingestDocument); - fail("processor should throw exception"); - } catch (ElasticsearchException e) { - assertThat(e.getRootCause().getMessage(), equalTo(exception.getMessage())); - } + Exception[] holder = new Exception[1]; + trackingProcessor.execute(ingestDocument, (result, e) -> holder[0] = e); + assertThat(((ElasticsearchException) holder[0]).getRootCause().getMessage(), equalTo(exception.getMessage())); SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); @@ -109,7 +106,7 @@ public class TrackingResultProcessorTests extends ESTestCase { Arrays.asList(onFailureProcessor, failProcessor))), Arrays.asList(onFailureProcessor)); CompoundProcessor trackingProcessor = decorate(actualProcessor, 
resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedFailResult = new SimulateProcessorResult(failProcessor.getTag(), ingestDocument); SimulateProcessorResult expectedSuccessResult = new SimulateProcessorResult(onFailureProcessor.getTag(), ingestDocument); @@ -148,7 +145,7 @@ public class TrackingResultProcessorTests extends ESTestCase { Collections.emptyList()); CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(1)); @@ -178,7 +175,7 @@ public class TrackingResultProcessorTests extends ESTestCase { new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue(key3, randomInt()); })); CompoundProcessor trackingProcessor = decorate(compoundProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(compoundProcessor.getTag(), ingestDocument); //the step for key 2 is never executed due to conditional and thus not part of the result set @@ -221,7 +218,7 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); @@ -287,7 +284,7 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); @@ -355,7 +352,7 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); @@ -407,7 +404,7 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); @@ -457,7 +454,9 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> trackingProcessor.execute(ingestDocument)); + Exception[] holder = new Exception[1]; + trackingProcessor.execute(ingestDocument, (result, e) -> holder[0] = e); + ElasticsearchException exception = (ElasticsearchException) holder[0]; assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(exception.getCause().getCause(), 
instanceOf(IllegalStateException.class)); assertThat(exception.getMessage(), containsString("Cycle detected for pipeline: pipeline1")); @@ -482,7 +481,7 @@ public class TrackingResultProcessorTests extends ESTestCase { CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList); - trackingProcessor.execute(ingestDocument); + trackingProcessor.execute(ingestDocument, (result, e) -> {}); SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument); From 04972baffa96c19c1601cfb245a8458195db1693 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 26 Sep 2019 09:20:53 +0200 Subject: [PATCH 74/94] Merge ShardSearchTransportRequest and ShardSearchLocalRequest (#46996) (#47081) This change merges the `ShardSearchTransportRequest` and `ShardSearchLocalRequest` into a single `ShardSearchRequest` that can be used to create a SearchContext. Relates #46523 --- .../query/TransportValidateQueryAction.java | 4 +- .../explain/TransportExplainAction.java | 4 +- .../search/AbstractSearchAsyncAction.java | 6 +- .../action/search/SearchPhaseContext.java | 4 +- .../action/search/SearchTransportService.java | 13 +- .../internal/ShardSearchLocalRequest.java | 282 ----------------- .../search/internal/ShardSearchRequest.java | 297 +++++++++++++++--- .../internal/ShardSearchTransportRequest.java | 191 ----------- .../AbstractSearchAsyncActionTests.java | 4 +- .../CanMatchPreFilterSearchPhaseTests.java | 8 +- .../action/search/MockSearchPhaseContext.java | 4 +- .../index/SearchSlowLogTests.java | 103 +----- .../index/query/InnerHitBuilderTests.java | 4 +- .../search/SearchServiceTests.java | 53 ++-- ...ests.java => ShardSearchRequestTests.java} | 18 +- .../search/slice/SliceBuilderTests.java | 128 +------- .../index/engine/FrozenIndexTests.java | 15 +- .../authz/IndicesAndAliasesResolverTests.java | 8 +- 18 files changed, 344 insertions(+), 802 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java delete mode 100644 server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java rename server/src/test/java/org/elasticsearch/search/internal/{ShardSearchTransportRequestTests.java => ShardSearchRequestTests.java} (93%) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 80bde16e0e4..9849f000ce8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -46,7 +46,7 @@ import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -194,7 +194,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction< boolean valid; String explanation = null; String error = null; - ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(request.shardId(), request.types(), 
+ ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(request.shardId(), request.types(), request.nowInMillis(), request.filteringAliases()); SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); try { diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index c29da21fe4a..342c55c6d62 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -45,7 +45,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; import org.elasticsearch.tasks.Task; @@ -116,7 +116,7 @@ public class TransportExplainAction extends TransportSingleShardAction exten } @Override - public final ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt) { + public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt) { AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); String indexName = shardIt.shardId().getIndex().getName(); final String[] routings = indexRoutings.getOrDefault(indexName, Collections.emptySet()) .toArray(new String[0]); - return new ShardSearchTransportRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), + return new ShardSearchRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), routings); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index 28838defa3e..994ef5553bb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import java.util.concurrent.Executor; @@ -109,7 +109,7 @@ interface SearchPhaseContext extends Executor { /** * Builds a request for the initial search phase. */ - ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt); + ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt); /** * Processes the phase transition from one phase to another. 
This method handles all errors that happen during the initial run execution diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 37c8fe4fcbd..4b66ed885db 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -41,7 +41,6 @@ import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; @@ -109,7 +108,7 @@ public class SearchTransportService { TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); } - public void sendCanMatch(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, final + public void sendCanMatch(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final ActionListener listener) { transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task, TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchService.CanMatchResponse::new)); @@ -120,13 +119,13 @@ public class SearchTransportService { TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, (in) -> TransportResponse.Empty.INSTANCE)); } - public void sendExecuteDfs(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, + public void sendExecuteDfs(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final SearchActionListener listener) { transportService.sendChildRequest(connection, DFS_ACTION_NAME, request, task, new ConnectionCountingHandler<>(listener, DfsSearchResult::new, clientConnections, connection.getNode().getId())); } - public void sendExecuteQuery(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, + public void sendExecuteQuery(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final SearchActionListener listener) { // we optimize this and expect a QueryFetchSearchResult if we only have a single shard in the search request // this used to be the QUERY_AND_FETCH which doesn't exist anymore. 
@@ -306,7 +305,7 @@ public class SearchTransportService { TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, (in) -> TransportResponse.Empty.INSTANCE); - transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { @Override @@ -330,7 +329,7 @@ public class SearchTransportService { }); TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, DfsSearchResult::new); - transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.executeQueryPhase(request, (SearchTask) task, new ChannelActionListener<>( channel, QUERY_ACTION_NAME, request)); @@ -374,7 +373,7 @@ public class SearchTransportService { TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, FetchSearchResult::new); // this is cheap, it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread - transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.canMatch(request, new ChannelActionListener<>(channel, QUERY_CAN_MATCH_NAME, request)); }); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java deleted file mode 100644 index bf4632253c1..00000000000 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.elasticsearch.search.internal;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.index.query.QueryRewriteContext;
-import org.elasticsearch.index.query.Rewriteable;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-
-import java.io.IOException;
-
-/**
- * Shard level search request that gets created and consumed on the local node.
- * Used directly by APIs that need to create a search context within their execution.
- *
- * Source structure:
- * <pre>
- * {
- *  from : 0, size : 20, (optional, can be set on the request)
- *  sort : { "name.first" : {}, "name.last" : { reverse : true } }
- *  fields : [ "name.first", "name.last" ]
- *  query : { ... }
- *  aggs : {
- *      "agg1" : {
- *          terms : { ... }
- *      }
- *  }
- * }
- * </pre>
- */ -public class ShardSearchLocalRequest implements ShardSearchRequest { - private final String clusterAlias; - private final ShardId shardId; - private final int numberOfShards; - private final SearchType searchType; - private final Scroll scroll; - private final String[] types; - private final float indexBoost; - private final Boolean requestCache; - private final long nowInMillis; - private final boolean allowPartialSearchResults; - private final String[] indexRoutings; - private final String preference; - //these are the only two mutable fields, as they are subject to rewriting - private AliasFilter aliasFilter; - private SearchSourceBuilder source; - - public ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards, AliasFilter aliasFilter, - float indexBoost, long nowInMillis, @Nullable String clusterAlias, String[] indexRoutings) { - this(shardId, numberOfShards, searchRequest.searchType(), searchRequest.source(), searchRequest.types(), - searchRequest.requestCache(), aliasFilter, indexBoost, searchRequest.allowPartialSearchResults(), indexRoutings, - searchRequest.preference(), searchRequest.scroll(), nowInMillis, clusterAlias); - // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted - // at this stage. Any NPEs in the above are therefore an error in request preparation logic. - assert searchRequest.allowPartialSearchResults() != null; - } - - public ShardSearchLocalRequest(ShardId shardId, String[] types, long nowInMillis, AliasFilter aliasFilter) { - this(shardId, -1, null, null, types, null, aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null); - } - - private ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types, - Boolean requestCache, AliasFilter aliasFilter, float indexBoost, boolean allowPartialSearchResults, - String[] indexRoutings, String preference, Scroll scroll, long nowInMillis, - @Nullable String clusterAlias) { - this.shardId = shardId; - this.numberOfShards = numberOfShards; - this.searchType = searchType; - this.source = source; - this.types = types; - this.requestCache = requestCache; - this.aliasFilter = aliasFilter; - this.indexBoost = indexBoost; - this.allowPartialSearchResults = allowPartialSearchResults; - this.indexRoutings = indexRoutings; - this.preference = preference; - this.scroll = scroll; - this.nowInMillis = nowInMillis; - this.clusterAlias = clusterAlias; - } - - ShardSearchLocalRequest(StreamInput in) throws IOException { - shardId = new ShardId(in); - searchType = SearchType.fromId(in.readByte()); - numberOfShards = in.readVInt(); - scroll = in.readOptionalWriteable(Scroll::new); - source = in.readOptionalWriteable(SearchSourceBuilder::new); - types = in.readStringArray(); - aliasFilter = new AliasFilter(in); - indexBoost = in.readFloat(); - nowInMillis = in.readVLong(); - requestCache = in.readOptionalBoolean(); - clusterAlias = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { - allowPartialSearchResults = in.readBoolean(); - } else if (in.getVersion().onOrAfter(Version.V_6_3_0)) { - allowPartialSearchResults = in.readOptionalBoolean(); - } else { - allowPartialSearchResults = false; - } - if (in.getVersion().onOrAfter(Version.V_6_4_0)) { - indexRoutings = in.readStringArray(); - preference = in.readOptionalString(); - } else { - indexRoutings = Strings.EMPTY_ARRAY; - preference = null; - } - } - - protected final void 
innerWriteTo(StreamOutput out, boolean asKey) throws IOException { - shardId.writeTo(out); - out.writeByte(searchType.id()); - if (!asKey) { - out.writeVInt(numberOfShards); - } - out.writeOptionalWriteable(scroll); - out.writeOptionalWriteable(source); - out.writeStringArray(types); - aliasFilter.writeTo(out); - out.writeFloat(indexBoost); - if (asKey == false) { - out.writeVLong(nowInMillis); - } - out.writeOptionalBoolean(requestCache); - out.writeOptionalString(clusterAlias); - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { - out.writeBoolean(allowPartialSearchResults); - } else if (out.getVersion().onOrAfter(Version.V_6_3_0)) { - out.writeOptionalBoolean(allowPartialSearchResults); - } - if (asKey == false) { - if (out.getVersion().onOrAfter(Version.V_6_4_0)) { - out.writeStringArray(indexRoutings); - out.writeOptionalString(preference); - } - } - } - - @Override - public ShardId shardId() { - return shardId; - } - - @Override - public String[] types() { - return types; - } - - @Override - public SearchSourceBuilder source() { - return source; - } - - @Override - public AliasFilter getAliasFilter() { - return aliasFilter; - } - - @Override - public void setAliasFilter(AliasFilter aliasFilter) { - this.aliasFilter = aliasFilter; - } - - @Override - public void source(SearchSourceBuilder source) { - this.source = source; - } - - @Override - public int numberOfShards() { - return numberOfShards; - } - - @Override - public SearchType searchType() { - return searchType; - } - - @Override - public float indexBoost() { - return indexBoost; - } - - @Override - public long nowInMillis() { - return nowInMillis; - } - - @Override - public Boolean requestCache() { - return requestCache; - } - - @Override - public boolean allowPartialSearchResults() { - return allowPartialSearchResults; - } - - @Override - public Scroll scroll() { - return scroll; - } - - @Override - public String[] indexRoutings() { - return indexRoutings; - } - - @Override - public String preference() { - return preference; - } - - @Override - public BytesReference cacheKey() throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); - this.innerWriteTo(out, true); - // copy it over, most requests are small, we might as well copy to make sure we are not sliced... - // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page - return new BytesArray(out.bytes().toBytesRef(), true);// do a deep copy - } - - @Override - public String getClusterAlias() { - return clusterAlias; - } - - @Override - public Rewriteable getRewriteable() { - return new RequestRewritable(this); - } - - static class RequestRewritable implements Rewriteable { - - final ShardSearchRequest request; - - RequestRewritable(ShardSearchRequest request) { - this.request = request; - } - - @Override - public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException { - SearchSourceBuilder newSource = request.source() == null ? 
null : Rewriteable.rewrite(request.source(), ctx); - AliasFilter newAliasFilter = Rewriteable.rewrite(request.getAliasFilter(), ctx); - if (newSource == request.source() && newAliasFilter == request.getAliasFilter()) { - return this; - } else { - request.source(newSource); - request.setAliasFilter(newAliasFilter); - return new RequestRewritable(request); - } - } - } -} diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index b88bda90090..97fbbec3a65 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -19,74 +19,310 @@ package org.elasticsearch.search.internal; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.AliasFilterParsingException; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportRequest; import java.io.IOException; +import java.util.Map; import java.util.function.Function; /** * Shard level request that represents a search. - * It provides all the methods that the {@link org.elasticsearch.search.internal.SearchContext} needs. + * It provides all the methods that the {@link SearchContext} needs. * Provides a cache key based on its content that can be used to cache shard level response. 
*/ -public interface ShardSearchRequest { +public class ShardSearchRequest extends TransportRequest implements IndicesRequest { + private final String clusterAlias; + private final ShardId shardId; + private final int numberOfShards; + private final SearchType searchType; + private final Scroll scroll; + private final String[] types; + private final float indexBoost; + private final Boolean requestCache; + private final long nowInMillis; + private final boolean allowPartialSearchResults; + private final String[] indexRoutings; + private final String preference; + private final OriginalIndices originalIndices; - ShardId shardId(); + //these are the only two mutable fields, as they are subject to rewriting + private AliasFilter aliasFilter; + private SearchSourceBuilder source; - String[] types(); + public ShardSearchRequest(OriginalIndices originalIndices, + SearchRequest searchRequest, + ShardId shardId, + int numberOfShards, + AliasFilter aliasFilter, + float indexBoost, + long nowInMillis, + @Nullable String clusterAlias, + String[] indexRoutings) { + this(originalIndices, + shardId, + numberOfShards, + searchRequest.searchType(), + searchRequest.source(), + searchRequest.types(), + searchRequest.requestCache(), + aliasFilter, + indexBoost, + searchRequest.allowPartialSearchResults(), + indexRoutings, + searchRequest.preference(), + searchRequest.scroll(), + nowInMillis, + clusterAlias); + // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted + // at this stage. Any NPEs in the above are therefore an error in request preparation logic. + assert searchRequest.allowPartialSearchResults() != null; + } - SearchSourceBuilder source(); + public ShardSearchRequest(ShardId shardId, + String[] types, + long nowInMillis, + AliasFilter aliasFilter) { + this(OriginalIndices.NONE, shardId, -1, null, null, types, null, + aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null); + } - AliasFilter getAliasFilter(); + private ShardSearchRequest(OriginalIndices originalIndices, + ShardId shardId, + int numberOfShards, + SearchType searchType, + SearchSourceBuilder source, + String[] types, + Boolean requestCache, + AliasFilter aliasFilter, + float indexBoost, + boolean allowPartialSearchResults, + String[] indexRoutings, + String preference, + Scroll scroll, + long nowInMillis, + @Nullable String clusterAlias) { + this.shardId = shardId; + this.numberOfShards = numberOfShards; + this.searchType = searchType; + this.source = source; + this.types = types; + this.requestCache = requestCache; + this.aliasFilter = aliasFilter; + this.indexBoost = indexBoost; + this.allowPartialSearchResults = allowPartialSearchResults; + this.indexRoutings = indexRoutings; + this.preference = preference; + this.scroll = scroll; + this.nowInMillis = nowInMillis; + this.clusterAlias = clusterAlias; + this.originalIndices = originalIndices; + } - void setAliasFilter(AliasFilter filter); + public ShardSearchRequest(StreamInput in) throws IOException { + super(in); + shardId = new ShardId(in); + searchType = SearchType.fromId(in.readByte()); + numberOfShards = in.readVInt(); + scroll = in.readOptionalWriteable(Scroll::new); + source = in.readOptionalWriteable(SearchSourceBuilder::new); + types = in.readStringArray(); + aliasFilter = new AliasFilter(in); + indexBoost = in.readFloat(); + nowInMillis = in.readVLong(); + requestCache = in.readOptionalBoolean(); + clusterAlias = in.readOptionalString(); + allowPartialSearchResults = in.readBoolean(); + 
indexRoutings = in.readStringArray(); + preference = in.readOptionalString(); + originalIndices = OriginalIndices.readOriginalIndices(in); + } - void source(SearchSourceBuilder source); + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + innerWriteTo(out, false); + OriginalIndices.writeOriginalIndices(originalIndices, out); + } - int numberOfShards(); + protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOException { + shardId.writeTo(out); + out.writeByte(searchType.id()); + if (!asKey) { + out.writeVInt(numberOfShards); + } + out.writeOptionalWriteable(scroll); + out.writeOptionalWriteable(source); + out.writeStringArray(types); + aliasFilter.writeTo(out); + out.writeFloat(indexBoost); + if (asKey == false) { + out.writeVLong(nowInMillis); + } + out.writeOptionalBoolean(requestCache); + out.writeOptionalString(clusterAlias); + out.writeBoolean(allowPartialSearchResults); + if (asKey == false) { + out.writeStringArray(indexRoutings); + out.writeOptionalString(preference); + } + } - SearchType searchType(); + @Override + public String[] indices() { + if (originalIndices == null) { + return null; + } + return originalIndices.indices(); + } - float indexBoost(); + @Override + public IndicesOptions indicesOptions() { + if (originalIndices == null) { + return null; + } + return originalIndices.indicesOptions(); + } - long nowInMillis(); + public ShardId shardId() { + return shardId; + } - Boolean requestCache(); + public String[] types() { + return types; + } - boolean allowPartialSearchResults(); + public SearchSourceBuilder source() { + return source; + } - Scroll scroll(); + public AliasFilter getAliasFilter() { + return aliasFilter; + } - /** - * Returns the routing values resolved by the coordinating node for the index pointed by {@link #shardId()}. - */ - String[] indexRoutings(); + public void setAliasFilter(AliasFilter aliasFilter) { + this.aliasFilter = aliasFilter; + } - /** - * Returns the preference of the original {@link SearchRequest#preference()}. - */ - String preference(); + public void source(SearchSourceBuilder source) { + this.source = source; + } + + public int numberOfShards() { + return numberOfShards; + } + + public SearchType searchType() { + return searchType; + } + + public float indexBoost() { + return indexBoost; + } + + public long nowInMillis() { + return nowInMillis; + } + + public Boolean requestCache() { + return requestCache; + } + + public boolean allowPartialSearchResults() { + return allowPartialSearchResults; + } + + public Scroll scroll() { + return scroll; + } + + public String[] indexRoutings() { + return indexRoutings; + } + + public String preference() { + return preference; + } /** * Returns the cache key for this shard search request, based on its content */ - BytesReference cacheKey() throws IOException; + public BytesReference cacheKey() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + this.innerWriteTo(out, true); + // copy it over, most requests are small, we might as well copy to make sure we are not sliced... 
+ // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page + return new BytesArray(out.bytes().toBytesRef(), true);// do a deep copy + } + + public String getClusterAlias() { + return clusterAlias; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new SearchTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + // Shard id is enough here, the request itself can be found by looking at the parent task description + return "shardId[" + shardId() + "]"; + } + + public Rewriteable getRewriteable() { + return new RequestRewritable(this); + } + + static class RequestRewritable implements Rewriteable { + + final ShardSearchRequest request; + + RequestRewritable(ShardSearchRequest request) { + this.request = request; + } + + @Override + public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException { + SearchSourceBuilder newSource = request.source() == null ? null : Rewriteable.rewrite(request.source(), ctx); + AliasFilter newAliasFilter = Rewriteable.rewrite(request.getAliasFilter(), ctx); + if (newSource == request.source() && newAliasFilter == request.getAliasFilter()) { + return this; + } else { + request.source(newSource); + request.setAliasFilter(newAliasFilter); + return new RequestRewritable(request); + } + } + } /** * Returns the filter associated with listed filtering aliases. @@ -94,8 +330,8 @@ public interface ShardSearchRequest { * The list of filtering aliases should be obtained by calling MetaData.filteringAliases. * Returns {@code null} if no filtering is required.
*/ - static QueryBuilder parseAliasFilter(CheckedFunction filterParser, - IndexMetaData metaData, String... aliasNames) { + public static QueryBuilder parseAliasFilter(CheckedFunction filterParser, + IndexMetaData metaData, String... aliasNames) { if (aliasNames == null || aliasNames.length == 0) { return null; } @@ -139,13 +375,4 @@ public interface ShardSearchRequest { return combined; } } - - /** - * Returns the cluster alias in case the request is part of a cross-cluster search request, null otherwise. - */ - @Nullable - String getClusterAlias(); - - Rewriteable getRewriteable(); - } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java deleted file mode 100644 index bfea1300f7e..00000000000 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.internal; - -import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchTask; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.transport.TransportRequest; - -import java.io.IOException; -import java.util.Map; - -/** - * Shard level search request that represents an actual search sent from the coordinating node to the nodes holding - * the shards where the query needs to be executed. Holds the same info as {@link org.elasticsearch.search.internal.ShardSearchLocalRequest} - * but gets sent over the transport and holds also the indices coming from the original request that generated it, plus its headers and - * context. 
- */ -public class ShardSearchTransportRequest extends TransportRequest implements ShardSearchRequest, IndicesRequest { - - private final OriginalIndices originalIndices; - private final ShardSearchLocalRequest shardSearchLocalRequest; - - public ShardSearchTransportRequest(OriginalIndices originalIndices, SearchRequest searchRequest, ShardId shardId, int numberOfShards, - AliasFilter aliasFilter, float indexBoost, long nowInMillis, - @Nullable String clusterAlias, String[] indexRoutings) { - this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost, - nowInMillis, clusterAlias, indexRoutings); - this.originalIndices = originalIndices; - } - - public ShardSearchTransportRequest(StreamInput in) throws IOException { - super(in); - shardSearchLocalRequest = new ShardSearchLocalRequest(in); - originalIndices = OriginalIndices.readOriginalIndices(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - shardSearchLocalRequest.innerWriteTo(out, false); - OriginalIndices.writeOriginalIndices(originalIndices, out); - } - - @Override - public String[] indices() { - if (originalIndices == null) { - return null; - } - return originalIndices.indices(); - } - - @Override - public IndicesOptions indicesOptions() { - if (originalIndices == null) { - return null; - } - return originalIndices.indicesOptions(); - } - - @Override - public ShardId shardId() { - return shardSearchLocalRequest.shardId(); - } - - @Override - public String[] types() { - return shardSearchLocalRequest.types(); - } - - @Override - public SearchSourceBuilder source() { - return shardSearchLocalRequest.source(); - } - - @Override - public AliasFilter getAliasFilter() { - return shardSearchLocalRequest.getAliasFilter(); - } - - @Override - public void setAliasFilter(AliasFilter filter) { - shardSearchLocalRequest.setAliasFilter(filter); - } - - @Override - public void source(SearchSourceBuilder source) { - shardSearchLocalRequest.source(source); - } - - @Override - public int numberOfShards() { - return shardSearchLocalRequest.numberOfShards(); - } - - @Override - public SearchType searchType() { - return shardSearchLocalRequest.searchType(); - } - - @Override - public float indexBoost() { - return shardSearchLocalRequest.indexBoost(); - } - - @Override - public long nowInMillis() { - return shardSearchLocalRequest.nowInMillis(); - } - - @Override - public Boolean requestCache() { - return shardSearchLocalRequest.requestCache(); - } - - @Override - public boolean allowPartialSearchResults() { - return shardSearchLocalRequest.allowPartialSearchResults(); - } - - @Override - public Scroll scroll() { - return shardSearchLocalRequest.scroll(); - } - - @Override - public String[] indexRoutings() { - return shardSearchLocalRequest.indexRoutings(); - } - - @Override - public String preference() { - return shardSearchLocalRequest.preference(); - } - - @Override - public BytesReference cacheKey() throws IOException { - return shardSearchLocalRequest.cacheKey(); - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new SearchTask(id, type, action, getDescription(), parentTaskId, headers); - } - - @Override - public String getDescription() { - // Shard id is enough here, the request itself can be found by looking at the parent task description - return "shardId[" + shardSearchLocalRequest.shardId() + "]"; - } - - @Override - public String getClusterAlias() { - 
return shardSearchLocalRequest.getClusterAlias(); - } - - @Override - public Rewriteable getRewriteable() { - return shardSearchLocalRequest.getRewriteable(); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 174b164aead..83484ab6870 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -146,7 +146,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); SearchShardIterator iterator = new SearchShardIterator(clusterAlias, new ShardId(new Index("name", "foo"), 1), Collections.emptyList(), new OriginalIndices(new String[] {"name", "name1"}, IndicesOptions.strictExpand())); - ShardSearchTransportRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator); + ShardSearchRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator); assertEquals(IndicesOptions.strictExpand(), shardSearchTransportRequest.indicesOptions()); assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices()); assertEquals(new MatchAllQueryBuilder(), shardSearchTransportRequest.getAliasFilter().getQueryBuilder()); diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 44fe3f92c61..8098459ce71 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -60,7 +60,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, + public void sendCanMatch(Transport.Connection connection, ShardSearchRequest request, SearchTask task, ActionListener listener) { new Thread(() -> listener.onResponse(new SearchService.CanMatchResponse(request.shardId().id() == 0 ? 
shard1 : shard2))).start(); @@ -117,7 +117,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { final boolean shard1 = randomBoolean(); SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, + public void sendCanMatch(Transport.Connection connection, ShardSearchRequest request, SearchTask task, ActionListener listener) { boolean throwException = request.shardId().id() != 0; if (throwException && randomBoolean()) { @@ -185,7 +185,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { @Override public void sendCanMatch( Transport.Connection connection, - ShardSearchTransportRequest request, + ShardSearchRequest request, SearchTask task, ActionListener listener) { listener.onResponse(new SearchService.CanMatchResponse(randomBoolean())); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 40c3ad0afc0..f1314c100eb 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ -111,7 +111,7 @@ public final class MockSearchPhaseContext implements SearchPhaseContext { } @Override - public ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt) { + public ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt) { Assert.fail("should not be called"); return null; } diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 9be3e429e5a..4da6ef9a7ab 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -21,24 +21,18 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchTask; -import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.threadpool.ThreadPool; import org.hamcrest.Matchers; import java.io.IOException; @@ 
-59,103 +53,12 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { protected SearchContext createSearchContext(IndexService indexService) { return createSearchContext(indexService, new String[]{}); } + protected SearchContext createSearchContext(IndexService indexService, String ... groupStats) { BigArrays bigArrays = indexService.getBigArrays(); - ThreadPool threadPool = indexService.getThreadPool(); + final ShardSearchRequest request = + new ShardSearchRequest(new ShardId(indexService.index(), 0), new String[0], 0L, null); return new TestSearchContext(bigArrays, indexService) { - final ShardSearchRequest request = new ShardSearchRequest() { - private SearchSourceBuilder searchSourceBuilder; - @Override - public ShardId shardId() { - return new ShardId(indexService.index(), 0); - } - - @Override - public String[] types() { - return new String[0]; - } - - @Override - public SearchSourceBuilder source() { - return searchSourceBuilder; - } - - @Override - public AliasFilter getAliasFilter() { - return new AliasFilter(QueryBuilders.matchAllQuery(), "foo"); - } - - @Override - public void setAliasFilter(AliasFilter filter) { - - } - - @Override - public void source(SearchSourceBuilder source) { - searchSourceBuilder = source; - } - - @Override - public int numberOfShards() { - return 0; - } - - @Override - public SearchType searchType() { - return null; - } - - @Override - public float indexBoost() { - return 1.0f; - } - - @Override - public long nowInMillis() { - return 0; - } - - @Override - public Boolean requestCache() { - return null; - } - - @Override - public boolean allowPartialSearchResults() { - return true; - } - - @Override - public Scroll scroll() { - return null; - } - - @Override - public String[] indexRoutings() { - return null; - } - - @Override - public String preference() { - return null; - } - - @Override - public BytesReference cacheKey() { - return null; - } - - @Override - public Rewriteable getRewriteable() { - return null; - } - - @Override - public String getClusterAlias() { - return null; - } - }; - @Override public List groupStats() { return Arrays.asList(groupStats); diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index db32c251fd3..40fa7ff6d95 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -99,7 +99,7 @@ public class InnerHitBuilderTests extends ESTestCase { * * This is necessary to ensure because we use the serialized BytesReference * of this builder as part of the cacheKey in - * {@link ShardSearchLocalRequest} (via + * {@link ShardSearchRequest} (via * {@link SearchSourceBuilder#collapse(org.elasticsearch.search.collapse.CollapseBuilder)}) */ public void testSerializationOrder() throws Exception { diff --git 
a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 81f7fcfef9d..0dd8d4d6e67 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -73,8 +73,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -271,7 +270,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { PlainActionFuture result = new PlainActionFuture<>(); final boolean useScroll = randomBoolean(); service.executeQueryPhase( - new ShardSearchLocalRequest(useScroll ? scrollSearchRequest : searchRequest, + new ShardSearchRequest(OriginalIndices.NONE, useScroll ? scrollSearchRequest : searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null), new SearchTask(123L, "", "", "", null, Collections.emptyMap()), result); @@ -319,7 +318,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final IndexShard indexShard = indexService.getShard(0); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); final SearchContext contextWithDefaultTimeout = service.createContext( - new ShardSearchLocalRequest( + new ShardSearchRequest( + OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, @@ -337,7 +337,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final long seconds = randomIntBetween(6, 10); searchRequest.source(new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds))); final SearchContext context = service.createContext( - new ShardSearchLocalRequest( + new ShardSearchRequest( + OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, @@ -371,12 +372,14 @@ public class SearchServiceTests extends ESSingleNodeTestCase { for (int i = 0; i < indexService.getIndexSettings().getMaxDocvalueFields(); i++) { searchSourceBuilder.docValueField("field" + i); } - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext( + new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, + new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null)) + ) { assertNotNull(context); searchSourceBuilder.docValueField("one_field_too_much"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))); assertEquals( "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. 
" @@ -404,13 +407,14 @@ public class SearchServiceTests extends ESSingleNodeTestCase { searchSourceBuilder.scriptField("field" + i, new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); } - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, + indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), + 1.0f, -1, null, null))) { assertNotNull(context); searchSourceBuilder.scriptField("anotherScriptField", new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))); assertEquals( "Trying to retrieve too many script_fields. Must be less than or equal to: [" + maxScriptFields + "] but was [" @@ -433,8 +437,9 @@ public class SearchServiceTests extends ESSingleNodeTestCase { searchSourceBuilder.scriptField("field" + 0, new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); searchSourceBuilder.size(0); - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE, + searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), + 1.0f, -1, null, null))) { assertEquals(0, context.scriptFields().fields().size()); } } @@ -532,11 +537,11 @@ public class SearchServiceTests extends ESSingleNodeTestCase { } } - private static class ShardScrollRequestTest extends ShardSearchLocalRequest { + private static class ShardScrollRequestTest extends ShardSearchRequest { private Scroll scroll; ShardScrollRequestTest(ShardId shardId) { - super(new SearchRequest().allowPartialSearchResults(true), + super(OriginalIndices.NONE, new SearchRequest().allowPartialSearchResults(true), shardId, 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null); this.scroll = new Scroll(TimeValue.timeValueMinutes(1)); } @@ -554,28 +559,28 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index")); final IndexShard indexShard = indexService.getShard(0); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder()); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, 
-1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder()) .aggregation(new TermsAggregationBuilder("test", ValueType.STRING).minDocCount(0))); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder()) .aggregation(new GlobalAggregationBuilder("test"))); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder())); - assertFalse(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertFalse(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); } @@ -624,7 +629,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { iae.getMessage()); assertFalse(service.getIndicesService().indexServiceSafe(index).getIndexSettings().isSearchThrottled()); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); - ShardSearchLocalRequest req = new ShardSearchLocalRequest(searchRequest, new ShardId(index, 0), 1, + ShardSearchRequest req = new ShardSearchRequest(OriginalIndices.NONE, searchRequest, new ShardId(index, 0), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null); Thread currentThread = Thread.currentThread(); // we still make sure can match is executed on the network thread @@ -666,7 +671,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { String clusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(3, 10); SearchRequest searchRequest = new SearchRequest(); searchRequest.allowPartialSearchResults(randomBoolean()); - ShardSearchTransportRequest request = new ShardSearchTransportRequest(OriginalIndices.NONE, searchRequest, shardId, + ShardSearchRequest request = new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shardId, indexService.numberOfShards(), AliasFilter.EMPTY, 1f, nowInMillis, clusterAlias, Strings.EMPTY_ARRAY); try (DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { SearchShardTarget searchShardTarget = searchContext.shardTarget(); @@ -694,7 +699,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { NullPointerException e = expectThrows(NullPointerException.class, () -> service.createContext( - new ShardSearchLocalRequest(shardId, null, 0, null) { + new ShardSearchRequest(shardId, null, 0, null) { @Override public SearchType searchType() { // induce an artificial NPE diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java rename to server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java index f9d9c798216..f0503ff1210 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java @@ -50,15 +50,15 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { +public class ShardSearchRequestTests extends AbstractSearchTestCase { private IndexMetaData baseMetaData = IndexMetaData.builder("test").settings(Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()) .numberOfShards(1).numberOfReplicas(1).build(); public void testSerialization() throws Exception { - ShardSearchTransportRequest shardSearchTransportRequest = createShardSearchTransportRequest(); - ShardSearchTransportRequest deserializedRequest = - copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchTransportRequest::new); + ShardSearchRequest shardSearchTransportRequest = createShardSearchRequest(); + ShardSearchRequest deserializedRequest = + copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchRequest::new); assertEquals(deserializedRequest.scroll(), shardSearchTransportRequest.scroll()); assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); assertArrayEquals(deserializedRequest.indices(), shardSearchTransportRequest.indices()); @@ -81,9 +81,9 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { public void testAllowPartialResultsSerializationPre7_0_0() throws IOException { Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)); - ShardSearchTransportRequest shardSearchTransportRequest = createShardSearchTransportRequest(); - ShardSearchTransportRequest deserializedRequest = - copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchTransportRequest::new, version); + 
ShardSearchRequest shardSearchTransportRequest = createShardSearchRequest(); + ShardSearchRequest deserializedRequest = + copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchRequest::new, version); if (version.before(Version.V_6_3_0)) { assertFalse(deserializedRequest.allowPartialSearchResults()); } else { @@ -91,7 +91,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { } } - private ShardSearchTransportRequest createShardSearchTransportRequest() throws IOException { + private ShardSearchRequest createShardSearchRequest() throws IOException { SearchRequest searchRequest = createSearchRequest(); ShardId shardId = new ShardId(randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10), randomInt()); final AliasFilter filteringAliases; @@ -102,7 +102,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY); } final String[] routings = generateRandomStringArray(5, 10, false, true); - return new ShardSearchTransportRequest(new OriginalIndices(searchRequest), searchRequest, shardId, + return new ShardSearchRequest(new OriginalIndices(searchRequest), searchRequest, shardId, randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()), randomAlphaOfLengthBetween(3, 10), routings); } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index bf053d34bff..767722bc690 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -30,10 +30,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.Version; -import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardIterator; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -43,24 +42,18 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.builder.SearchSourceBuilder; 
-import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; @@ -84,120 +77,6 @@ import static org.mockito.Mockito.when; public class SliceBuilderTests extends ESTestCase { private static final int MAX_SLICE = 20; - static class ShardSearchRequestTest implements IndicesRequest, ShardSearchRequest { - private final String[] indices; - private final int shardId; - private final String[] indexRoutings; - private final String preference; - - ShardSearchRequestTest(String index, int shardId, String[] indexRoutings, String preference) { - this.indices = new String[] { index }; - this.shardId = shardId; - this.indexRoutings = indexRoutings; - this.preference = preference; - } - - @Override - public String[] indices() { - return indices; - } - - @Override - public IndicesOptions indicesOptions() { - return null; - } - - @Override - public ShardId shardId() { - return new ShardId(new Index(indices[0], indices[0]), shardId); - } - - @Override - public String[] types() { - return new String[0]; - } - - @Override - public SearchSourceBuilder source() { - return null; - } - - @Override - public AliasFilter getAliasFilter() { - return null; - } - - @Override - public void setAliasFilter(AliasFilter filter) { - - } - - @Override - public void source(SearchSourceBuilder source) { - - } - - @Override - public int numberOfShards() { - return 0; - } - - @Override - public SearchType searchType() { - return null; - } - - @Override - public float indexBoost() { - return 0; - } - - @Override - public long nowInMillis() { - return 0; - } - - @Override - public Boolean requestCache() { - return null; - } - - @Override - public boolean allowPartialSearchResults() { - return true; - } - - @Override - public Scroll scroll() { - return null; - } - - @Override - public String[] indexRoutings() { - return indexRoutings; - } - - @Override - public String preference() { - return preference; - } - - @Override - public BytesReference cacheKey() { - return null; - } - - @Override - public String getClusterAlias() { - return null; - } - - @Override - public Rewriteable getRewriteable() { - return null; - } - } - private static SliceBuilder randomSliceBuilder() { int max = randomIntBetween(2, MAX_SLICE); int id = randomIntBetween(1, max - 1); @@ -233,7 +112,8 @@ public class SliceBuilderTests extends ESTestCase { } private ShardSearchRequest createRequest(int shardId, String[] routings, String preference) { - return new ShardSearchRequestTest("index", shardId, routings, preference); + return new ShardSearchRequest(OriginalIndices.NONE, new SearchRequest().preference(preference).allowPartialSearchResults(true), + new ShardId("index", "index", shardId), 1, null, 0f, System.currentTimeMillis(), null, routings); } private QueryShardContext createShardContext(Version indexVersionCreated, IndexReader reader, diff --git a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java index 812a3800527..b8e711d25a5 100644 --- a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.index.engine; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; import 
org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; @@ -36,7 +37,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.XPackClient; import org.elasticsearch.xpack.frozen.FrozenIndices; @@ -254,17 +255,17 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertFalse(indexService.getIndexSettings().isSearchThrottled()); SearchService searchService = getInstanceFromNode(SearchService.class); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); searchRequest.source(sourceBuilder); sourceBuilder.query(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); sourceBuilder.query(QueryBuilders.rangeQuery("field").gt("2010-01-06T02:00").lt("2010-01-07T02:00")); - assertFalse(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertFalse(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); } @@ -279,17 +280,17 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertTrue(indexService.getIndexSettings().isSearchThrottled()); SearchService searchService = getInstanceFromNode(SearchService.class); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.query(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")); searchRequest.source(sourceBuilder); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); sourceBuilder.query(QueryBuilders.rangeQuery("field").gt("2010-01-06T02:00").lt("2010-01-07T02:00")); - assertFalse(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertFalse(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new 
AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); IndicesStatsResponse response = client().admin().indices().prepareStats("index").clear().setRefresh(true).get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 72e54ecf7a9..e3d562317e5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -47,7 +47,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; @@ -197,7 +197,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testDashIndicesAreAllowedInShardLevelRequests() { //indices with names starting with '-' or '+' can be created up to version 2.x and can be around in 5.x //aliases with names starting with '-' or '+' can be created up to version 5.x and can be around in 6.x - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); when(request.indices()).thenReturn(new String[]{"-index10", "-index20", "+index30"}); List indices = resolveIndices(request, buildAuthorizedIndices(userDashIndices, SearchAction.NAME)) .getLocal(); @@ -207,7 +207,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { } public void testWildcardsAreNotAllowedInShardLevelRequests() { - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); when(request.indices()).thenReturn(new String[]{"index*"}); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> resolveIndices(request, buildAuthorizedIndices(userDashIndices, SearchAction.NAME)) @@ -217,7 +217,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { } public void testAllIsNotAllowedInShardLevelRequests() { - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); if (randomBoolean()) { when(request.indices()).thenReturn(new String[]{"_all"}); } else { From 97d977f381a9870ebda6ba26a9d453174983d5d1 Mon Sep 17 00:00:00 2001 From: jimczi Date: Thu, 26 Sep 2019 09:56:24 +0200 Subject: [PATCH 75/94] #47046 Fix serialization version check after backport --- .../search/internal/ShardSearchRequest.java | 30 +++++++++++++++---- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 97fbbec3a65..17a2e2cb764 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.search.internal; +import org.elasticsearch.Version; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; @@ -160,9 +161,20 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque nowInMillis = in.readVLong(); requestCache = in.readOptionalBoolean(); clusterAlias = in.readOptionalString(); - allowPartialSearchResults = in.readBoolean(); - indexRoutings = in.readStringArray(); - preference = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + allowPartialSearchResults = in.readBoolean(); + } else if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + allowPartialSearchResults = in.readOptionalBoolean(); + } else { + allowPartialSearchResults = false; + } + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + indexRoutings = in.readStringArray(); + preference = in.readOptionalString(); + } else { + indexRoutings = Strings.EMPTY_ARRAY; + preference = null; + } originalIndices = OriginalIndices.readOriginalIndices(in); } @@ -189,10 +201,16 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque } out.writeOptionalBoolean(requestCache); out.writeOptionalString(clusterAlias); - out.writeBoolean(allowPartialSearchResults); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(allowPartialSearchResults); + } else if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + out.writeOptionalBoolean(allowPartialSearchResults); + } if (asKey == false) { - out.writeStringArray(indexRoutings); - out.writeOptionalString(preference); + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeStringArray(indexRoutings); + out.writeOptionalString(preference); + } } } From 9a64b7a888e4b95845d4611814a12297dc7d5319 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Thu, 26 Sep 2019 17:57:36 +1000 Subject: [PATCH 76/94] [Backport] Validate `query` field when creating roles (#46275) (#47094) In the current implementation, the validation of the role query occurs at runtime, when the query is executed. This commit adds validation for the role query at role-creation time, but not for template queries, because we do not have the runtime information required to evaluate a template query (e.g. the authenticated user's information). This is similar to stored scripts, which we store without evaluating or parsing them to check that they are valid. For validation, the query is evaluated (if it is not a template), parsed to build the QueryBuilder, and checked to verify that the query type is allowed.
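To make the new check concrete, the following minimal sketch shows how a put-role handler can invoke the validator this patch introduces (DLSRoleQueryValidator, added below). The wrapper class and method name are illustrative assumptions for this note; only DLSRoleQueryValidator.validateQueryField, its parameters, and its ElasticsearchParseException failure mode are taken from the patch itself.

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;

// Hypothetical put-role handler fragment, not the actual TransportPutRoleAction code.
final class PutRoleQueryCheck {

    static void checkRoleQueries(RoleDescriptor descriptor, NamedXContentRegistry registry) {
        // Parses and verifies every DLS query attached to the role's index
        // privileges before the role is stored. Template queries are skipped
        // inside validateQueryField, because rendering them needs runtime
        // context (the authenticated user) that is not available here.
        // A malformed or disallowed query (for example a terms query with a
        // terms lookup, or a has_child query) makes the call throw
        // ElasticsearchParseException, so the put-role request fails up front
        // instead of at search time.
        DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), registry);
    }
}

Note that a role whose query is a template still passes this check unchanged: template queries are only rendered and verified at search time (via evaluateAndVerifyRoleQuery), once the authenticated user is known.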
Closes #34252 --- .../org/elasticsearch/client/SecurityIT.java | 2 +- .../authz/permission/DocumentPermissions.java | 65 +------ .../authz/support/DLSRoleQueryValidator.java | 182 ++++++++++++++++++ .../SecurityQueryTemplateEvaluator.java | 6 +- ...ityIndexReaderWrapperIntegrationTests.java | 32 +-- .../permission/DocumentPermissionsTests.java | 47 ----- .../support/DLSRoleQueryValidatorTests.java | 63 ++++++ .../xpack/security/Security.java | 8 +- .../action/TransportCreateApiKeyAction.java | 20 +- .../action/role/TransportPutRoleAction.java | 15 +- .../user/TransportHasPrivilegesAction.java | 22 ++- .../esnative/ESNativeRealmMigrateTool.java | 2 +- .../security/authz/store/FileRolesStore.java | 63 +++--- .../xpack/security/SecurityTests.java | 3 +- .../action/role/PutRoleBuilderTests.java | 2 +- .../role/TransportPutRoleActionTests.java | 108 ++++++++++- .../HasPrivilegesRequestBuilderTests.java | 2 +- .../TransportHasPrivilegesActionTests.java | 78 ++++++++ .../esnative/ESNativeMigrateToolTests.java | 2 +- .../authc/esnative/NativeRealmIntegTests.java | 6 +- .../security/authz/RoleDescriptorTests.java | 21 +- .../authz/store/FileRolesStoreTests.java | 49 +++-- .../xpack/security/authz/store/roles.yml | 12 +- .../security/authz/store/roles2xformat.yml | 2 +- .../test/roles/30_prohibited_role_query.yml | 50 +---- 25 files changed, 627 insertions(+), 235 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java index abf65d19df3..8122ff17648 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java @@ -190,7 +190,7 @@ public class SecurityIT extends ESRestHighLevelClientTestCase { .name(roleName) .clusterPrivileges(randomSubsetOf(randomInt(3), Role.ClusterPrivilegeName.ALL_ARRAY)) .indicesPrivileges( - randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom(randomAlphaOfLength(3)))) + randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom("{\"match_all\": {}}"))) .applicationResourcePrivileges(randomArray(3, ApplicationResourcePrivileges[]::new, () -> ApplicationResourcePrivilegesTests.createNewRandom(randomAlphaOfLength(3).toLowerCase(Locale.ROOT)))) .runAsPrivilege(randomArray(3, String[]::new, () -> randomAlphaOfLength(3))); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java index 08d754b4e53..c5b946ad595 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java @@ -12,29 +12,18 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ToChildBlockJoinQuery; import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.BoostingQueryBuilder; -import org.elasticsearch.index.query.ConstantScoreQueryBuilder; -import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.search.NestedHelper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.xpack.core.security.authz.support.SecurityQueryTemplateEvaluator; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.core.security.user.User; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; import java.util.Set; import java.util.function.Function; @@ -127,11 +116,9 @@ public final class DocumentPermissions { BooleanQuery.Builder filter) throws IOException { for (BytesReference bytesReference : queries) { QueryShardContext queryShardContext = queryShardContextProvider.apply(shardId); - String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(bytesReference.utf8ToString(), scriptService, user); - try (XContentParser parser = XContentFactory.xContent(templateResult).createParser(queryShardContext.getXContentRegistry(), - LoggingDeprecationHandler.INSTANCE, templateResult)) { - QueryBuilder queryBuilder = queryShardContext.parseInnerQueryBuilder(parser); - verifyRoleQuery(queryBuilder); + QueryBuilder queryBuilder = DLSRoleQueryValidator.evaluateAndVerifyRoleQuery(bytesReference, scriptService, + queryShardContext.getXContentRegistry(), user); + if (queryBuilder != null) { failIfQueryUsesClient(queryBuilder, queryShardContext); Query roleQuery = queryShardContext.toQuery(queryBuilder).query(); filter.add(roleQuery, SHOULD); @@ -153,50 +140,6 @@ public final class DocumentPermissions { filter.setMinimumNumberShouldMatch(1); } - /** - * Checks whether the role query contains queries we know can't be used as DLS role query. - */ - static void verifyRoleQuery(QueryBuilder queryBuilder) throws IOException { - if (queryBuilder instanceof TermsQueryBuilder) { - TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder; - if (termsQueryBuilder.termsLookup() != null) { - throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query"); - } - } else if (queryBuilder instanceof GeoShapeQueryBuilder) { - GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder; - if (geoShapeQueryBuilder.shape() == null) { - throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't support as part of a role query"); - } - } else if (queryBuilder.getName().equals("percolate")) { - // actually only if percolate query is referring to an existing document then this is problematic, - // a normal percolate query does work. 
However we can't check that here as this query builder is inside - // another module. So we don't allow the entire percolate query. I don't think users would ever use - // a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls. - throw new IllegalArgumentException("percolate query isn't support as part of a role query"); - } else if (queryBuilder.getName().equals("has_child")) { - throw new IllegalArgumentException("has_child query isn't support as part of a role query"); - } else if (queryBuilder.getName().equals("has_parent")) { - throw new IllegalArgumentException("has_parent query isn't support as part of a role query"); - } else if (queryBuilder instanceof BoolQueryBuilder) { - BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder; - List clauses = new ArrayList<>(); - clauses.addAll(boolQueryBuilder.filter()); - clauses.addAll(boolQueryBuilder.must()); - clauses.addAll(boolQueryBuilder.mustNot()); - clauses.addAll(boolQueryBuilder.should()); - for (QueryBuilder clause : clauses) { - verifyRoleQuery(clause); - } - } else if (queryBuilder instanceof ConstantScoreQueryBuilder) { - verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery()); - } else if (queryBuilder instanceof FunctionScoreQueryBuilder) { - verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query()); - } else if (queryBuilder instanceof BoostingQueryBuilder) { - verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery()); - verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery()); - } - } - /** * Fall back validation that verifies that queries during rewrite don't use * the client to make remote calls. In the case of DLS this can cause a dead diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java new file mode 100644 index 00000000000..1e4df7e8a4a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.authz.support; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.BoostingQueryBuilder; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * This class helps in evaluating the query field if it is template, + * validating the query and checking if the query type is allowed to be used in DLS role query. + */ +public final class DLSRoleQueryValidator { + + private DLSRoleQueryValidator() { + } + + /** + * Validates the query field in the {@link RoleDescriptor.IndicesPrivileges} only if it is not a template query.
+ * It parses the query and builds the {@link QueryBuilder}, also checks if the query type is supported in DLS role query. + * + * @param indicesPrivileges {@link RoleDescriptor.IndicesPrivileges} + * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries + */ + public static void validateQueryField(RoleDescriptor.IndicesPrivileges[] indicesPrivileges, + NamedXContentRegistry xContentRegistry) { + if (indicesPrivileges != null) { + for (int i = 0; i < indicesPrivileges.length; i++) { + BytesReference query = indicesPrivileges[i].getQuery(); + try { + if (query != null) { + if (isTemplateQuery(query, xContentRegistry)) { + // skip template query, this requires runtime information like 'User' information. + continue; + } + + evaluateAndVerifyRoleQuery(query.utf8ToString(), xContentRegistry); + } + } catch (ParsingException | IllegalArgumentException | IOException e) { + throw new ElasticsearchParseException("failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(indicesPrivileges[i].getIndices()) + + "] at index privilege [" + i + "] of role descriptor", e); + } + } + } + } + + private static boolean isTemplateQuery(BytesReference query, NamedXContentRegistry xContentRegistry) throws IOException { + try (XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, + LoggingDeprecationHandler.INSTANCE, query.utf8ToString())) { + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.START_OBJECT + "] but " + + "found [" + token + "] instead"); + } + token = parser.nextToken(); + if (token != XContentParser.Token.FIELD_NAME) { + throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.FIELD_NAME + "] with " + + "value a query name or 'template' but found [" + token + "] instead"); + } + String fieldName = parser.currentName(); + if ("template".equals(fieldName)) { + return true; + } + } + + return false; + } + + /** + * Evaluates the query if it is a template and then validates the query by parsing + * and building the {@link QueryBuilder}. It also checks if the query type is + * supported in DLS role query. + * + * @param query {@link BytesReference} query field from the role + * @param scriptService {@link ScriptService} used for evaluation of a template query + * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries + * @param user {@link User} used when evaluation a template query + * @return {@link QueryBuilder} if the query is valid and allowed, in case {@link RoleDescriptor.IndicesPrivileges} + * * does not have a query field then it returns {@code null}. 
+ */ + @Nullable + public static QueryBuilder evaluateAndVerifyRoleQuery(BytesReference query, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, User user) { + if (query != null) { + String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(query.utf8ToString(), scriptService, + user); + try { + return evaluateAndVerifyRoleQuery(templateResult, xContentRegistry); + } catch (ElasticsearchParseException | ParsingException | XContentParseException | IOException e) { + throw new ElasticsearchParseException("failed to parse field 'query' from the role descriptor", e); + } + } + return null; + } + + @Nullable + private static QueryBuilder evaluateAndVerifyRoleQuery(String query, NamedXContentRegistry xContentRegistry) throws IOException { + if (query != null) { + try (XContentParser parser = XContentFactory.xContent(query).createParser(xContentRegistry, + LoggingDeprecationHandler.INSTANCE, query)) { + QueryBuilder queryBuilder = AbstractQueryBuilder.parseInnerQueryBuilder(parser); + verifyRoleQuery(queryBuilder); + return queryBuilder; + } + } + return null; + } + + /** + * Checks whether the role query contains queries we know can't be used as DLS role query. + * + * @param queryBuilder {@link QueryBuilder} for given query + */ + // pkg protected for testing + static void verifyRoleQuery(QueryBuilder queryBuilder) { + if (queryBuilder instanceof TermsQueryBuilder) { + TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder; + if (termsQueryBuilder.termsLookup() != null) { + throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query"); + } + } else if (queryBuilder instanceof GeoShapeQueryBuilder) { + GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder; + if (geoShapeQueryBuilder.shape() == null) { + throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't supported as part of a role query"); + } + } else if (queryBuilder.getName().equals("percolate")) { + // actually only if percolate query is referring to an existing document then this is problematic, + // a normal percolate query does work. However we can't check that here as this query builder is inside + // another module. So we don't allow the entire percolate query. I don't think users would ever use + // a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls. 
+ throw new IllegalArgumentException("percolate query isn't supported as part of a role query"); + } else if (queryBuilder.getName().equals("has_child")) { + throw new IllegalArgumentException("has_child query isn't supported as part of a role query"); + } else if (queryBuilder.getName().equals("has_parent")) { + throw new IllegalArgumentException("has_parent query isn't supported as part of a role query"); + } else if (queryBuilder instanceof BoolQueryBuilder) { + BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder; + List clauses = new ArrayList<>(); + clauses.addAll(boolQueryBuilder.filter()); + clauses.addAll(boolQueryBuilder.must()); + clauses.addAll(boolQueryBuilder.mustNot()); + clauses.addAll(boolQueryBuilder.should()); + for (QueryBuilder clause : clauses) { + verifyRoleQuery(clause); + } + } else if (queryBuilder instanceof ConstantScoreQueryBuilder) { + verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery()); + } else if (queryBuilder instanceof FunctionScoreQueryBuilder) { + verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query()); + } else if (queryBuilder instanceof BoostingQueryBuilder) { + verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery()); + verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java index 73a1d7fcde5..0fac1016342 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java @@ -44,10 +44,8 @@ public final class SecurityQueryTemplateEvaluator { * @return resultant query string after compiling and executing the script. * If the source does not contain template then it will return the query * source without any modifications. - * @throws IOException thrown when there is any error parsing the query - * string. 
*/ - public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) throws IOException { + public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) { // EMPTY is safe here because we never use namedObject try (XContentParser parser = XContentFactory.xContent(querySource).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, querySource)) { @@ -76,6 +74,8 @@ public final class SecurityQueryTemplateEvaluator { } else { return querySource; } + } catch (IOException ioe) { + throw new ElasticsearchParseException("failed to parse query", ioe); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index ca49e4ae4a3..ca2b38318a0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -28,12 +28,10 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; @@ -55,9 +53,7 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -76,9 +72,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final Authentication authentication = mock(Authentication.class); when(authentication.getUser()).thenReturn(mock(User.class)); threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); - IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new - FieldPermissions(), - DocumentPermissions.filteredBy(singleton(new BytesArray("{\"match_all\" : {}}")))); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY); Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); @@ -90,14 +83,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY); XPackLicenseState licenseState = mock(XPackLicenseState.class); 
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, - bitsetCache, threadContext, licenseState, scriptService) { - - @Override - protected IndicesAccessControl getIndicesAccessControl() { - return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); - } - }; Directory directory = newDirectory(); IndexWriter iw = new IndexWriter( @@ -144,17 +129,32 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); for (int i = 0; i < numValues; i++) { + String termQuery = "{\"term\": {\"field\": \""+ values[i] + "\"} }"; + IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new + FieldPermissions(), + DocumentPermissions.filteredBy(singleton(new BytesArray(termQuery)))); + SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, + bitsetCache, threadContext, licenseState, scriptService) { + + @Override + protected IndicesAccessControl getIndicesAccessControl() { + return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); + } + }; + ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i]))); - doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class)); when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery); + DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader); IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()); int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); + TotalHitCountCollector countCollector = new TotalHitCountCollector(); indexSearcher.search(new MatchAllDocsQuery(), countCollector); + assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java index f8d1334df7e..37b6dd70e5d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java @@ -6,23 +6,14 @@ package org.elasticsearch.xpack.core.security.authz.permission; -import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.BoostingQueryBuilder; -import org.elasticsearch.index.query.ConstantScoreQueryBuilder; -import org.elasticsearch.index.query.GeoShapeQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.indices.TermsLookup; -import org.elasticsearch.join.query.HasChildQueryBuilder; -import org.elasticsearch.join.query.HasParentQueryBuilder; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -68,44 +59,6 @@ public class DocumentPermissionsTests extends ESTestCase { assertThat(ae.getMessage(), containsString("nested scoping for document permissions is not permitted")); } - public void testVerifyRoleQuery() throws Exception { - QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); - DocumentPermissions.verifyRoleQuery(queryBuilder1); - - QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_type", "_id", "_path")); - Exception e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder2)); - assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query")); - - QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type"); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder3)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder4)); - assertThat(e.getMessage(), equalTo("has_child query isn't support as part of a role query")); - - QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder5)); - assertThat(e.getMessage(), equalTo("has_parent query isn't support as part of a role query")); - - QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder6)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder7)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder8)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"), - new MatchAllQueryBuilder()); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder9)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); 
- } - public void testFailIfQueryUsesClient() throws Exception { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java new file mode 100644 index 00000000000..b441c40340d --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.authz.support; + +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.BoostingQueryBuilder; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.indices.TermsLookup; +import org.elasticsearch.join.query.HasChildQueryBuilder; +import org.elasticsearch.join.query.HasParentQueryBuilder; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class DLSRoleQueryValidatorTests extends ESTestCase { + + public void testVerifyRoleQuery() throws Exception { + QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); + DLSRoleQueryValidator.verifyRoleQuery(queryBuilder1); + + QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_id", "_path")); + Exception e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder2)); + assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query")); + + QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type"); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder3)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + + QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder4)); + assertThat(e.getMessage(), equalTo("has_child query isn't supported as part of a role query")); + + QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder5)); + assertThat(e.getMessage(), equalTo("has_parent query isn't supported as part of a role query")); + + QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type")); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder6)); + assertThat(e.getMessage(), equalTo("geoshape query 
referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder7));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder8));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"),
+            new MatchAllQueryBuilder());
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder9));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 46419739f6e..7bd57c1b02a 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -388,7 +388,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
                                               NamedXContentRegistry xContentRegistry, Environment environment,
                                               NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
         try {
-            return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService);
+            return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService, xContentRegistry);
         } catch (final Exception e) {
             throw new IllegalStateException("security initialization failed", e);
         }
@@ -396,7 +396,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
     // pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly
     Collection<Object> createComponents(Client client, ThreadPool threadPool, ClusterService clusterService,
-                                        ResourceWatcherService resourceWatcherService, ScriptService scriptService) throws Exception {
+                                        ResourceWatcherService resourceWatcherService, ScriptService scriptService,
+                                        NamedXContentRegistry xContentRegistry) throws Exception {
         if (enabled == false) {
             return Collections.emptyList();
         }
@@ -451,7 +452,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
         dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings));
         final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
-        final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState());
+        final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState(),
+            xContentRegistry);
         final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get());
         final ReservedRolesStore reservedRolesStore = new ReservedRolesStore();
         List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> rolesProviders = new ArrayList<>();
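Security.java threads the NamedXContentRegistry down to the role stores because a role's DLS query is stored as JSON and can only be turned back into a QueryBuilder by resolving query names such as "match_all" against the registry. A minimal sketch of that parsing step, assuming the standard XContent helpers rather than quoting the validator verbatim:

    // hedged sketch, not code from this patch: why the validator needs a registry
    static QueryBuilder parseRoleQuery(String queryJson, NamedXContentRegistry registry) throws IOException {
        try (XContentParser parser = XContentType.JSON.xContent()
                .createParser(registry, LoggingDeprecationHandler.INSTANCE, queryJson)) {
            // the registry maps a query name like "match_all" to its parser
            return AbstractQueryBuilder.parseInnerQueryBuilder(parser);
        }
    }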
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java
index 09612c5e01f..72a92516e59 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java
@@ -6,11 +6,13 @@
 
 package org.elasticsearch.xpack.security.action;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.SecurityContext;
@@ -18,6 +20,8 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyAction;
 import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest;
 import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.security.authc.ApiKeyService;
 import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
 
@@ -32,14 +36,16 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<C
 
     private final ApiKeyService apiKeyService;
     private final SecurityContext securityContext;
     private final CompositeRolesStore rolesStore;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
     public TransportCreateApiKeyAction(TransportService transportService, ActionFilters actionFilters, ApiKeyService apiKeyService,
-                                       SecurityContext context, CompositeRolesStore rolesStore) {
+                                       SecurityContext context, CompositeRolesStore rolesStore,
+                                       NamedXContentRegistry xContentRegistry) {
         super(CreateApiKeyAction.NAME, transportService, actionFilters, (Writeable.Reader<CreateApiKeyRequest>) CreateApiKeyRequest::new);
         this.apiKeyService = apiKeyService;
         this.securityContext = context;
         this.rolesStore = rolesStore;
+        this.xContentRegistry = xContentRegistry;
     }
 
@@ -49,7 +55,17 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<C
             listener.onFailure(new IllegalStateException("authentication is required"));
         } else {
             rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())),
-                ActionListener.wrap(roleDescriptors -> apiKeyService.createApiKey(authentication, request, roleDescriptors, listener),
+                ActionListener.wrap(roleDescriptors -> {
+                    for (RoleDescriptor rd : roleDescriptors) {
+                        try {
+                            DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry);
+                        } catch (ElasticsearchException | IllegalArgumentException e) {
+                            listener.onFailure(e);
+                            return;
+                        }
+                    }
+                    apiKeyService.createApiKey(authentication, request, roleDescriptors, listener);
+                },
                     listener::onFailure));
         }
     }
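A caller-side view of the new API-key safeguard, condensed from the behaviour above. The builder calls mirror the tests later in this patch; the JSON payload itself is illustrative:

    // hedged usage sketch: a role descriptor carrying a terms-lookup DLS query
    RoleDescriptor.IndicesPrivileges priv = RoleDescriptor.IndicesPrivileges.builder()
        .indices("idx1")
        .privileges("read")
        .query(new BytesArray("{\"terms\": {\"field\": {\"index\": \"lookup\", \"id\": \"1\", \"path\": \"p\"}}}"))
        .build();
    // expected to fail with:
    // "terms query with terms lookup isn't supported as part of a role query"
    DLSRoleQueryValidator.validateQueryField(new RoleDescriptor.IndicesPrivileges[] { priv }, xContentRegistry);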
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
index c0a91bcdb02..300c8c835ff 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
@@ -5,26 +5,32 @@
  */
 package org.elasticsearch.xpack.security.action.role;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleAction;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
 
 public class TransportPutRoleAction extends HandledTransportAction<PutRoleRequest, PutRoleResponse> {
 
     private final NativeRolesStore rolesStore;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
-    public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) {
+    public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService,
+                                  NamedXContentRegistry xContentRegistry) {
         super(PutRoleAction.NAME, transportService, actionFilters, PutRoleRequest::new);
         this.rolesStore = rolesStore;
+        this.xContentRegistry = xContentRegistry;
     }
 
     @Override
@@ -35,6 +41,13 @@ public class TransportPutRoleAction extends HandledTransportAction<PutRoleReque
             return;
         }
 
+        try {
+            DLSRoleQueryValidator.validateQueryField(request.roleDescriptor().getIndicesPrivileges(), xContentRegistry);
+        } catch (ElasticsearchException | IllegalArgumentException e) {
+            listener.onFailure(e);
+            return;
+        }
+
         rolesStore.putRole(request, request.roleDescriptor(), new ActionListener<Boolean>() {
             @Override
             public void onResponse(Boolean created) {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java
index ae400172bf1..7401463bf57 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java
@@ -8,10 +8,13 @@ package org.elasticsearch.xpack.security.action.user;
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
@@ -36,15 +39,20 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
 
     private final ThreadPool threadPool;
     private final AuthorizationService authorizationService;
     private final NativePrivilegeStore privilegeStore;
+    private final SecurityContext securityContext;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
     public TransportHasPrivilegesAction(ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
-                                        AuthorizationService authorizationService, NativePrivilegeStore privilegeStore) {
+                                        AuthorizationService authorizationService, NativePrivilegeStore privilegeStore,
+                                        SecurityContext context, NamedXContentRegistry xContentRegistry) {
         super(HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new);
         this.threadPool = threadPool;
         this.authorizationService = authorizationService;
         this.privilegeStore = privilegeStore;
+        this.securityContext = context;
+        this.xContentRegistry = xContentRegistry;
     }
@@ -60,6 +65,15 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
             listener.onFailure(new IllegalArgumentException("users may only check the privileges of their own account"));
             return;
         }
 
+        for (RoleDescriptor.IndicesPrivileges indexPrivilege : request.indexPrivileges()) {
+            BytesReference query = indexPrivilege.getQuery();
+            if (query != null) {
+                listener.onFailure(new IllegalArgumentException(
+                    "users may only check the index privileges without any DLS role query"));
+                return;
+            }
+        }
+
         resolveApplicationPrivileges(request, ActionListener.wrap(applicationPrivilegeDescriptors ->
             authorizationService.checkPrivileges(authentication, request, applicationPrivilegeDescriptors, listener),
             listener::onFailure));
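The has-privileges path takes the stricter line of refusing any DLS query outright rather than validating it. A hedged, condensed view of what the new test later in this patch exercises (builder calls as they appear there; not a verbatim excerpt):

    HasPrivilegesRequest request = new HasPrivilegesRequest();
    request.username("user-1");
    request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder()
        .indices("idx1")
        .privileges("read")
        .query(new BytesArray("{\"match_all\": {}}"))  // any role query at all
        .build());
    // doExecute is expected to fail the listener with an IllegalArgumentException:
    // "users may only check the index privileges without any DLS role query"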
terminal.println("importing roles from [" + rolesFile + "]..."); Logger logger = getTerminalLogger(terminal); - Map roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY); + Map roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY, null); Set existingRoles; try { existingRoles = getRolesThatExist(terminal, env.settings(), env, options); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index a2be72dc6d6..5a2c5456534 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Nullable; @@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.core.security.support.NoOpLogger; import org.elasticsearch.xpack.core.security.support.Validation; @@ -61,27 +63,30 @@ public class FileRolesStore implements BiConsumer, ActionListener>> listeners = new ArrayList<>(); private volatile Map permissions; - public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState) + public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState, + NamedXContentRegistry xContentRegistry) throws IOException { - this(settings, env, watcherService, null, licenseState); + this(settings, env, watcherService, null, licenseState, xContentRegistry); } FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer> listener, - XPackLicenseState licenseState) throws IOException { + XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) throws IOException { this.settings = settings; this.file = resolveFile(env); if (listener != null) { listeners.add(listener); } this.licenseState = licenseState; + this.xContentRegistry = xContentRegistry; FileWatcher watcher = new FileWatcher(file.getParent()); watcher.addListener(new FileListener()); watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH); - permissions = parseFile(file, logger, settings, licenseState); + permissions = parseFile(file, logger, settings, licenseState, xContentRegistry); } @@ -150,15 +155,17 @@ public class FileRolesStore implements BiConsumer, ActionListener parseFileForRoleNames(Path path, Logger logger) { - return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet(); + // EMPTY is safe here because we never use namedObject as we are just parsing role names + return parseRoleDescriptors(path, logger, false, 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java
index a2be72dc6d6..5a2c5456534 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java
@@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.Nullable;
@@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
 import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.core.security.support.NoOpLogger;
 import org.elasticsearch.xpack.core.security.support.Validation;
 
@@ -61,27 +63,30 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
 
     private final Settings settings;
     private final Path file;
     private final XPackLicenseState licenseState;
+    private final NamedXContentRegistry xContentRegistry;
     private final List<Consumer<Set<String>>> listeners = new ArrayList<>();
 
     private volatile Map<String, RoleDescriptor> permissions;
 
-    public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState)
+    public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState,
+                          NamedXContentRegistry xContentRegistry)
             throws IOException {
-        this(settings, env, watcherService, null, licenseState);
+        this(settings, env, watcherService, null, licenseState, xContentRegistry);
     }
 
     FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer<Set<String>> listener,
-                   XPackLicenseState licenseState) throws IOException {
+                   XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) throws IOException {
         this.settings = settings;
         this.file = resolveFile(env);
         if (listener != null) {
             listeners.add(listener);
         }
         this.licenseState = licenseState;
+        this.xContentRegistry = xContentRegistry;
         FileWatcher watcher = new FileWatcher(file.getParent());
         watcher.addListener(new FileListener());
         watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
-        permissions = parseFile(file, logger, settings, licenseState);
+        permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
     }
 
@@ -150,15 +155,17 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
 
     public static Set<String> parseFileForRoleNames(Path path, Logger logger) {
-        return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet();
+        // EMPTY is safe here because we never use namedObject as we are just parsing role names
+        return parseRoleDescriptors(path, logger, false, Settings.EMPTY, NamedXContentRegistry.EMPTY).keySet();
     }
 
-    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState) {
-        return parseFile(path, logger, true, settings, licenseState);
+    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState,
+                                                        NamedXContentRegistry xContentRegistry) {
+        return parseFile(path, logger, true, settings, licenseState, xContentRegistry);
     }
 
-    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission,
-                                                        Settings settings, XPackLicenseState licenseState) {
+    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings,
+                                                        XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) {
         if (logger == null) {
             logger = NoOpLogger.INSTANCE;
         }
@@ -170,7 +177,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
             List<String> roleSegments = roleSegments(path);
             final boolean flsDlsLicensed = licenseState.isDocumentAndFieldLevelSecurityAllowed();
             for (String segment : roleSegments) {
-                RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
+                RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
                 if (descriptor != null) {
                     if (ReservedRolesStore.isReserved(descriptor.getName())) {
                         logger.warn("role [{}] is reserved. the relevant role definition in the mapping file will be ignored",
@@ -202,7 +209,8 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
     }
 
-    public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings) {
+    public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings,
+                                                                   NamedXContentRegistry xContentRegistry) {
         if (logger == null) {
             logger = NoOpLogger.INSTANCE;
         }
@@ -213,7 +221,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
             List<String> roleSegments = roleSegments(path);
             for (String segment : roleSegments) {
-                RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
+                RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
                 if (rd != null) {
                     roles.put(rd.getName(), rd);
                 }
@@ -231,12 +239,12 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
     }
 
     @Nullable
-    static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger,
-                                              boolean resolvePermission, Settings settings) {
+    static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermission,
+                                              Settings settings, NamedXContentRegistry xContentRegistry) {
         String roleName = null;
@@ -306,10 +314,18 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
+                try {
+                    DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), xContentRegistry);
+                } catch (ElasticsearchException | IllegalArgumentException e) {
+                    logger.error((Supplier<?>) () -> new ParameterizedMessage(
+                        "invalid role definition [{}] in roles file [{}]. failed to validate query field. skipping role...", roleName,
+                        path.toAbsolutePath()), e);
+                    return null;
+                }
             }
         }
         return descriptor;
@@ -350,7 +367,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<R
             final Map<String, RoleDescriptor> previousPermissions = permissions;
             try {
-                permissions = parseFile(file, logger, settings, licenseState);
+                permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
                 logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), Files.exists(file) ?
"changed" : "removed"); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 92130ca5f4e..4a1cf946676 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -128,7 +128,8 @@ public class SecurityTests extends ESTestCase { Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); - return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class)); + return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class), + xContentRegistry()); } private static T findComponent(Class type, Collection components) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java index ba305e15ed7..33fe3259b3c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java @@ -32,4 +32,4 @@ public class PutRoleBuilderTests extends ESTestCase { "[role1], use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead")); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 3cbb7782688..f7f3647f798 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -5,10 +5,21 @@ */ package org.elasticsearch.xpack.security.action.role; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.join.query.HasChildQueryBuilder; +import org.elasticsearch.join.query.HasParentQueryBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -22,6 +33,7 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; @@ -41,12 +53,26 @@ import static 
org.mockito.Mockito.verifyZeroInteractions; public class TransportPutRoleActionTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(Arrays.asList( + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME), + (p, c) -> MatchAllQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasChildQueryBuilder.NAME), + (p, c) -> HasChildQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasParentQueryBuilder.NAME), + (p, c) -> HasParentQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(TermQueryBuilder.NAME), + (p, c) -> TermQueryBuilder.fromXContent(p)))); + } + public void testReservedRole() { final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names())); NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -76,7 +102,8 @@ public class TransportPutRoleActionTests extends ESTestCase { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -119,7 +146,8 @@ public class TransportPutRoleActionTests extends ESTestCase { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -154,4 +182,78 @@ public class TransportPutRoleActionTests extends ESTestCase { assertThat(throwableRef.get(), is(sameInstance(e))); verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class)); } + + public void testCreationOfRoleWithMalformedQueryJsonFails() { + NativeRolesStore rolesStore = mock(NativeRolesStore.class); + TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + 
xContentRegistry()); + PutRoleRequest request = new PutRoleRequest(); + request.name("test"); + String[] malformedQueryJson = new String[]{"{ \"match_all\": { \"unknown_field\": \"\" } }", + "{ malformed JSON }", + "{ \"unknown\": {\"\"} }", + "{}"}; + BytesReference query = new BytesArray(randomFrom(malformedQueryJson)); + request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean()); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(mock(Task.class), request, new ActionListener() { + @Override + public void onResponse(PutRoleResponse response) { + responseRef.set(response); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) + + "] at index privilege [0] of role descriptor")); + } + + public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception { + NativeRolesStore rolesStore = mock(NativeRolesStore.class); + TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); + PutRoleRequest request = new PutRoleRequest(); + request.name("test"); + String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }"; + String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }"; + BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery)); + request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean()); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(mock(Task.class), request, new ActionListener() { + @Override + public void onResponse(PutRoleResponse response) { + responseRef.set(response); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) + + "] at index privilege [0] of role descriptor")); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java index 0b9de2da332..612437a4b99 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java @@ -116,4 +116,4 @@ public class 
HasPrivilegesRequestBuilderTests extends ESTestCase { ); assertThat(parseException.getMessage(), containsString("[cluster,index,applications] are missing")); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java new file mode 100644 index 00000000000..7cd2ed7a2f7 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; +import org.junit.After; +import org.junit.Before; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportHasPrivilegesActionTests extends ESTestCase { + private ThreadPool threadPool; + + @Before + public void createThreadPool() { + threadPool = new TestThreadPool("has privileges action tests"); + } + + @After + public void stopThreadPool() { + terminate(threadPool); + } + + public void testHasPrivilegesRequestDoesNotAllowDLSRoleQueryBasedIndicesPrivileges() { + final ThreadContext threadContext = threadPool.getThreadContext(); + final SecurityContext context = mock(SecurityContext.class); + final User user = new User("user-1", "superuser"); + final Authentication authentication = new Authentication(user, + new Authentication.RealmRef("native", "default_native", "node1"), null); + when(context.getAuthentication()).thenReturn(authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); + final TransportHasPrivilegesAction transportHasPrivilegesAction = new TransportHasPrivilegesAction(threadPool, + mock(TransportService.class), mock(ActionFilters.class), mock(AuthorizationService.class), mock(NativePrivilegeStore.class), + context, xContentRegistry()); + + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + final RoleDescriptor.IndicesPrivileges[] 
indicesPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(1, 5)]; + for (int i = 0; i < indicesPrivileges.length; i++) { + indicesPrivileges[i] = RoleDescriptor.IndicesPrivileges.builder() + .privileges(randomFrom("read", "write")) + .indices(randomAlphaOfLengthBetween(2, 8)) + .query(new BytesArray(randomAlphaOfLength(5))) + .build(); + } + request.indexPrivileges(indicesPrivileges); + request.username("user-1"); + + final PlainActionFuture listener = new PlainActionFuture<>(); + transportHasPrivilegesAction.doExecute(mock(Task.class), request, listener); + + final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> listener.actionGet()); + assertThat(ile, notNullValue()); + assertThat(ile.getMessage(), containsString("users may only check the index privileges without any DLS role query")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index a73fc93f32e..2e5e87d3077 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -131,7 +131,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[] { "index" }, new String[] { "read" }, new String[] { "body", "title" }, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .get(); addedRoles.add(rname); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 89a82c5e51b..3bc220d6a68 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -198,7 +198,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -215,13 +215,13 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .get(); c.preparePutRole("test_role3") .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .get(); logger.info("--> retrieving all roles"); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index 37788d82403..765d5e83a76 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -61,7 +61,7 @@ public class RoleDescriptorTests extends ESTestCase { .indices("i1", "i2") .privileges("read") .grantedFields("body", "title") - .query("{\"query\": {\"match_all\": {}}}") + .query("{\"match_all\": {}}") .build() }; final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = { @@ -82,7 +82,7 @@ public class RoleDescriptorTests extends ESTestCase { assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" + ", global=[{APPLICATION:manage:applications=app01,app02}]" + ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" + - ", field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" + + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" + ", runAs=[sudo], metadata=[{}]]")); } @@ -94,7 +94,7 @@ public class RoleDescriptorTests extends ESTestCase { .privileges("read") .grantedFields("body", "title") .allowRestrictedIndices(randomBoolean()) - .query("{\"query\": {\"match_all\": {}}}") + .query("{\"match_all\": {}}") .build() }; final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = { @@ -136,7 +136,7 @@ public class RoleDescriptorTests extends ESTestCase { "\"p2\"]}, {\"names\": \"idx2\", \"allow_restricted_indices\": true, \"privileges\": [\"p3\"], \"field_security\": " + "{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " + "\"idx2\", \"allow_restricted_indices\": false," + - "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}"; + "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": {\"match_all\": {}} }]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); @@ -261,6 +261,18 @@ public class RoleDescriptorTests extends ESTestCase { assertNull(rd.getIndicesPrivileges()[0].getQuery()); } + public void testParseNullQuery() throws Exception { + String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"index\": [{\"names\": [\"idx1\",\"idx2\"], " + + "\"privileges\": [\"p1\", \"p2\"], \"query\": null}]}"; + RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + assertEquals("test", rd.getName()); + assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); + assertEquals(1, rd.getIndicesPrivileges().length); + assertArrayEquals(new String[] { "idx1", "idx2" }, rd.getIndicesPrivileges()[0].getIndices()); + assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs()); + assertNull(rd.getIndicesPrivileges()[0].getQuery()); + } + public void testParseEmptyQueryUsingDeprecatedIndicesField() throws Exception { String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " + 
"\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}"; @@ -283,4 +295,5 @@ public class RoleDescriptorTests extends ESTestCase { assertEquals(1, parsed.getTransientMetadata().size()); assertEquals(true, parsed.getTransientMetadata().get("enabled")); } + } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 3a2c3089100..99ae113e15f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -9,9 +9,13 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,6 +49,7 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -61,11 +66,17 @@ import static org.mockito.Mockito.when; public class FileRolesStoreTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class, + new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p)))); + } + public void testParseFile() throws Exception { Path path = getDataPath("roles.yml"); Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), true) - .build(), new XPackLicenseState(Settings.EMPTY)); + .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(9)); @@ -235,6 +246,8 @@ public class FileRolesStoreTests extends ESTestCase { assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); assertThat(group.getQuery(), notNullValue()); + + assertThat(roles.get("role_query_invalid"), nullValue()); } public void testParseFileWithFLSAndDLSDisabled() throws Exception { @@ -244,14 +257,15 @@ public class FileRolesStoreTests extends ESTestCase { events.clear(); Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), false) - .build(), new XPackLicenseState(Settings.EMPTY)); + .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(6)); assertThat(roles.get("role_fields"), nullValue()); assertThat(roles.get("role_query"), nullValue()); assertThat(roles.get("role_query_fields"), nullValue()); + 
assertThat(roles.get("role_query_invalid"), nullValue()); - assertThat(events, hasSize(3)); + assertThat(events, hasSize(4)); assertThat( events.get(0), startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() + @@ -262,6 +276,9 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(events.get(2), startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() + "]. document and field level security is not enabled.")); + assertThat(events.get(3), + startsWith("invalid role definition [role_query_invalid] in roles file [" + path.toAbsolutePath() + + "]. document and field level security is not enabled.")); } public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { @@ -271,7 +288,7 @@ public class FileRolesStoreTests extends ESTestCase { events.clear(); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState, xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(9)); assertNotNull(roles.get("role_fields")); @@ -295,7 +312,8 @@ public class FileRolesStoreTests extends ESTestCase { public void testDefaultRolesFile() throws Exception { // TODO we should add the config dir to the resources so we don't copy this stuff around... Path path = getDataPath("default_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(0)); } @@ -325,7 +343,7 @@ public class FileRolesStoreTests extends ESTestCase { FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> { modifiedRoles.addAll(roleSet); latch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); Set descriptors = store.roleDescriptors(Collections.singleton("role1")); assertThat(descriptors, notNullValue()); @@ -368,7 +386,7 @@ public class FileRolesStoreTests extends ESTestCase { store = new FileRolesStore(settings, env, watcherService, roleSet -> { truncatedFileRolesModified.addAll(roleSet); truncateLatch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); final Set allRolesPreTruncate = store.getAllRoleNames(); try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { @@ -391,7 +409,7 @@ public class FileRolesStoreTests extends ESTestCase { store = new FileRolesStore(settings, env, watcherService, roleSet -> { modifiedFileRolesModified.addAll(roleSet); modifyLatch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { writer.append("role5:").append(System.lineSeparator()); @@ -416,7 +434,8 @@ public class FileRolesStoreTests extends ESTestCase { public void testThatEmptyFileDoesNotResultInLoop() throws Exception { Path file = createTempFile(); Files.write(file, Collections.singletonList("#"), 
StandardCharsets.UTF_8); - Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles.keySet(), is(empty())); } @@ -425,7 +444,8 @@ public class FileRolesStoreTests extends ESTestCase { Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null); List entries = CapturingLogger.output(logger.getName(), Level.ERROR); entries.clear(); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles.size(), is(1)); assertThat(roles, hasKey("valid_role")); RoleDescriptor descriptor = roles.get("valid_role"); @@ -467,7 +487,8 @@ public class FileRolesStoreTests extends ESTestCase { List events = CapturingLogger.output(logger.getName(), Level.ERROR); events.clear(); Path path = getDataPath("reserved_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(1)); @@ -498,7 +519,8 @@ public class FileRolesStoreTests extends ESTestCase { .put(XPackSettings.DLS_FLS_ENABLED.getKey(), flsDlsEnabled) .build(); Environment env = TestEnvironment.newEnvironment(settings); - FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY)); + FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); Map usageStats = store.usageStats(); @@ -512,9 +534,10 @@ public class FileRolesStoreTests extends ESTestCase { Path path = getDataPath("roles2xformat.yml"); byte[] bytes = Files.readAllBytes(path); String roleString = new String(bytes, Charset.defaultCharset()); - RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY); + RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY, xContentRegistry()); RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0]; assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo")); assertNull(indicesPrivileges.getDeniedFields()); } + } diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml index 99459c5f5ec..c1c8bc4b1d7 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml @@ -65,8 +65,16 @@ role_query_fields: privileges: - READ query: - match_all: + match_all: {} field_security: grant: - foo - - boo \ No newline at end of file + - boo + +role_query_invalid: + indices: + - names: + - 'query_idx' + privileges: + - READ + query: '{ "unknown": {} }' \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml index ebfdce617a0..d0eb7ba4922 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml @@ -5,7 +5,7 @@ role1: privileges: - READ query: - match_all: + match_all: {} fields: - foo - boo \ No newline at end of file diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml index abddcdc6dda..d3b32456324 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml @@ -7,7 +7,11 @@ setup: cluster.health: wait_for_status: yellow +--- +"Test use prohibited query inside role query": + - do: + catch: /terms query with terms lookup isn't supported as part of a role query/ security.put_role: name: "role" body: > @@ -24,49 +28,3 @@ setup: ] } - - do: - security.put_user: - username: "joe" - body: > - { - "password": "x-pack-test-password", - "roles" : [ "role" ] - } - ---- -teardown: - - do: - security.delete_user: - username: "joe" - ignore: 404 - - do: - security.delete_role: - name: "role" - ignore: 404 - - ---- -"Test use prohibited query inside role query": - - - do: - headers: - Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" - index: - index: index - type: type - id: 1 - body: > - { - "foo": "bar" - } - - - - do: - catch: /terms query with terms lookup isn't supported as part of a role query/ - headers: - Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" - search: - rest_total_hits_as_int: true - index: index - body: { "query" : { "match_all" : {} } } - From 95e2ca741eb6d73e1f36bb17e1a44afdef61ca58 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 26 Sep 2019 12:49:21 +0200 Subject: [PATCH 77/94] Remove unused private methods and fields (#47154) This commit removes a bunch of unused private fields and unused private methods from the code base. 
Backport of (#47115) --- .../client/indices/DetailAnalyzeResponse.java | 2 -- .../org/elasticsearch/client/MigrationIT.java | 18 ---------------- .../ml/DeleteModelSnapshotRequestTests.java | 4 ---- .../elasticsearch/threadpool/Scheduler.java | 3 --- .../metadata/IndexCreationTaskTests.java | 4 ---- .../common/xcontent/BaseXContentTestCase.java | 6 ------ .../terms/RareTermsAggregatorTests.java | 11 ---------- .../index/shard/IndexShardTestCase.java | 15 ------------- .../analysis/AnalysisFactoryTestCase.java | 17 +-------------- .../ml/action/TransportOpenJobAction.java | 4 ---- .../ml/job/persistence/JobConfigProvider.java | 15 ------------- .../persistence/JobResultsProviderTests.java | 4 ---- .../esnative/tool/SetupPasswordToolTests.java | 21 +------------------ .../watcher/trigger/TriggerServiceTests.java | 8 +------ 14 files changed, 3 insertions(+), 129 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index 36cf8afad0d..0696ac7b555 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -113,8 +113,6 @@ public class DetailAnalyzeResponse { private final String name; private final AnalyzeResponse.AnalyzeToken[] tokens; - private static final String TOKENS = "tokens"; - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java index 57b6e422bb1..3396f3352e2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java @@ -21,12 +21,10 @@ package org.elasticsearch.client; import org.elasticsearch.client.migration.DeprecationInfoRequest; import org.elasticsearch.client.migration.DeprecationInfoResponse; -import org.elasticsearch.client.tasks.TaskSubmissionResponse; import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.util.Collections; -import java.util.function.BooleanSupplier; import static org.hamcrest.Matchers.equalTo; @@ -42,20 +40,4 @@ public class MigrationIT extends ESRestHighLevelClientTestCase { assertThat(response.getNodeSettingsIssues().size(), equalTo(0)); assertThat(response.getMlSettingsIssues().size(), equalTo(0)); } - - /** - * Using low-level api as high-level-rest-client's getTaskById work is in progress. 
- * TODO revisit once that work is finished - */ - private BooleanSupplier checkCompletionStatus(TaskSubmissionResponse upgrade) { - return () -> { - try { - Response response = client().performRequest(new Request("GET", "/_tasks/" + upgrade.getTask())); - return (boolean) entityAsMap(response).get("completed"); - } catch (IOException e) { - fail(e.getMessage()); - return false; - } - }; - } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequestTests.java index 4e02344d768..5cc0155c117 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequestTests.java @@ -33,8 +33,4 @@ public class DeleteModelSnapshotRequestTests extends ESTestCase { -> new DeleteModelSnapshotRequest(randomAlphaOfLength(10), null)); assertEquals("[snapshot_id] must not be null", ex.getMessage()); } - - private DeleteModelSnapshotRequest createTestInstance() { - return new DeleteModelSnapshotRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); - } } diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 570b1b60410..edf42cc4546 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -19,8 +19,6 @@ package org.elasticsearch.threadpool; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; @@ -263,7 +261,6 @@ public interface Scheduler { * tasks to the uncaught exception handler */ class SafeScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { - private static final Logger logger = LogManager.getLogger(SafeScheduledThreadPoolExecutor.class); @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") public SafeScheduledThreadPoolExecutor(int corePoolSize, ThreadFactory threadFactory, RejectedExecutionHandler handler) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index 893d1f58e8e..946f4c45f38 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -360,10 +360,6 @@ public class IndexCreationTaskTests extends ESTestCase { .numberOfReplicas(numReplicas); } - private Map createCustom() { - return Collections.singletonMap("a", "b"); - } - private interface MetaDataBuilderConfigurator { void configure(IndexTemplateMetaData.Builder builder) throws IOException; } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 7c5cc2bc802..4c2fcfbb589 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent; import 
com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -1208,11 +1207,6 @@ public abstract class BaseXContentTestCase extends ESTestCase { assertThat(e.getMessage(), containsString("Field name cannot be null")); } - private static void expectNonNullFormatterException(ThrowingRunnable runnable) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable); - assertThat(e.getMessage(), containsString("DateTimeFormatter cannot be null")); - } - private static void expectObjectException(ThrowingRunnable runnable) { JsonGenerationException e = expectThrows(JsonGenerationException.class, runnable); assertThat(e.getMessage(), containsString("Current context not Object")); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index ef678df3dca..441ed43d244 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -56,7 +56,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; @@ -523,16 +522,6 @@ public class RareTermsAggregatorTests extends AggregatorTestCase { return documents; } - - private InternalAggregation buildInternalAggregation(RareTermsAggregationBuilder builder, MappedFieldType fieldType, - IndexSearcher searcher) throws IOException { - AbstractRareTermsAggregator aggregator = createAggregator(builder, searcher, fieldType); - aggregator.preCollection(); - searcher.search(new MatchAllDocsQuery(), aggregator); - aggregator.postCollection(); - return aggregator.buildAggregation(0L); - } - private void testSearchCase(Query query, List dataset, Consumer configure, Consumer verify, ValueType valueType) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index cce9780b092..d7b13816560 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.shard; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexNotFoundException; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; @@ -695,20 +694,6 @@ public abstract class IndexShardTestCase extends ESTestCase { inSyncIds, newRoutingTable); } - private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws 
IOException { - Store.MetadataSnapshot result; - try { - result = replica.snapshotStoreMetadata(); - } catch (IndexNotFoundException e) { - // OK! - result = Store.MetadataSnapshot.EMPTY; - } catch (IOException e) { - logger.warn("failed read store, treating as empty", e); - result = Store.MetadataSnapshot.EMPTY; - } - return result; - } - public static Set getShardDocUIDs(final IndexShard shard) throws IOException { return getDocIdAndSeqNos(shard).stream().map(DocIdSeqNoAndSource::getId).collect(Collectors.toSet()); } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 758a468f93f..368e99f3d58 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -35,8 +35,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -49,20 +47,7 @@ import static java.util.Collections.emptyMap; */ public abstract class AnalysisFactoryTestCase extends ESTestCase { - private static final Pattern UNDERSCORE_THEN_ANYTHING = Pattern.compile("_(.)"); - - private static String toCamelCase(String s) { - Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s); - StringBuffer sb = new StringBuffer(); - while (m.find()) { - m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT)); - } - m.appendTail(sb); - sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); - return sb.toString(); - } - - static final Map> KNOWN_TOKENIZERS = new MapBuilder>() + private static final Map> KNOWN_TOKENIZERS = new MapBuilder>() // exposed in ES .put("classic", MovedToAnalysisCommon.class) .put("edgengram", MovedToAnalysisCommon.class) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 2b36f350272..02c62cccd25 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -162,10 +162,6 @@ public class TransportOpenJobAction extends TransportMasterNodeAction d.getRules().isEmpty() == false); - } - public static String nodeFilter(DiscoveryNode node, Job job) { String jobId = job.getId(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 64c6583f7c2..549945e44fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -549,21 +549,6 @@ public class JobConfigProvider { } - private SearchRequest makeExpandIdsSearchRequest(String expression, boolean excludeDeleting) { - String [] tokens = ExpandedIdsMatcher.tokenizeExpression(expression); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildQuery(tokens, excludeDeleting)); - sourceBuilder.sort(Job.ID.getPreferredName()); - sourceBuilder.fetchSource(false); - 
sourceBuilder.docValueField(Job.ID.getPreferredName(), null); - sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null); - - return client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setSource(sourceBuilder) - .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) - .request(); - } - /** * The same logic as {@link #expandJobsIds(String, boolean, boolean, ActionListener)} but * the full anomaly detector job configuration is returned. diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 010aecfc66b..bfd785b5112 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -1037,10 +1037,6 @@ public class JobResultsProviderTests extends ESTestCase { verifyNoMoreInteractions(client); } - private Bucket createBucketAtEpochTime(long epoch) { - return new Bucket("foo", new Date(epoch), 123); - } - private JobResultsProvider createProvider(Client client) { return new JobResultsProvider(client, Settings.EMPTY); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java index 6e821720069..4d0e05a5c32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java @@ -12,19 +12,15 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.protocol.xpack.XPackInfoResponse; @@ -443,21 +439,6 @@ public class SetupPasswordToolTests extends CommandTestCase { } } - private String parsePassword(String value) throws IOException { - try (XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, value)) { - XContentParser.Token token = parser.nextToken(); - if (token == XContentParser.Token.START_OBJECT) { - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - if (parser.nextToken() == 
XContentParser.Token.VALUE_STRING) { - return parser.text(); - } - } - } - } - throw new RuntimeException("Did not properly parse password."); - } - private URL authenticateUrl(URL url) throws MalformedURLException, URISyntaxException { return new URL(url, (url.toURI().getPath() + "/_security/_authenticate").replaceAll("/+", "/") + "?pretty"); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java index 6372823d36d..1e1dd6050b4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java @@ -178,10 +178,4 @@ public class TriggerServiceTests extends ESTestCase { newActions.add(actionWrapper); when(watch.actions()).thenReturn(newActions); } - - private void setTransform(Watch watch, String type) { - ExecutableTransform transform = mock(ExecutableTransform.class); - when(transform.type()).thenReturn(type); - when(watch.transform()).thenReturn(transform); - } -} \ No newline at end of file +} From 73a09b34b82591d779bcf24094cf07960fb1c44d Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 26 Sep 2019 14:21:23 +0200 Subject: [PATCH 78/94] Replace SearchContextException with SearchException (#47046) This commit removes the SearchContextException in favor of a simpler SearchException that doesn't leak the SearchContext. Relates #46523 --- .../index/rankeval/RankEvalResponseTests.java | 4 +- .../elasticsearch/ElasticsearchException.java | 8 +++- .../index/query/QueryShardContext.java | 8 ++++ .../search/DefaultSearchContext.java | 2 +- .../search/SearchContextException.java | 41 ------------------- .../search/SearchParseException.java | 11 +++-- .../elasticsearch/search/SearchService.java | 29 ++++++------- .../search/aggregations/AggregationPhase.java | 2 +- .../search/aggregations/AggregatorBase.java | 5 ++- .../ScriptedMetricAggregatorFactory.java | 2 +- .../elasticsearch/search/dfs/DfsPhase.java | 2 +- .../dfs/DfsPhaseExecutionException.java | 14 +++---- .../search/fetch/FetchPhase.java | 9 ++-- .../fetch/FetchPhaseExecutionException.java | 14 +++---- .../fetch/subphase/ExplainFetchSubPhase.java | 2 +- .../highlight/FastVectorHighlighter.java | 2 +- .../subphase/highlight/PlainHighlighter.java | 6 ++- .../highlight/UnifiedHighlighter.java | 2 +- .../search/query/QueryPhase.java | 4 +- .../query/QueryPhaseExecutionException.java | 14 +++---- .../ElasticsearchExceptionTests.java | 6 +-- .../ExceptionSerializationTests.java | 9 ++-- .../search/searchafter/SearchAfterIT.java | 6 +-- .../search/slice/SearchSliceIT.java | 4 +- .../search/source/MetadataFetchingIT.java | 10 ++--- .../elasticsearch/test/TestSearchContext.java | 4 ++ 26 files changed, 98 insertions(+), 122 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/search/SearchContextException.java diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 649db936d4f..1939b5e19c1 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.search.SearchHit; import 
org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.ArrayList; @@ -58,6 +57,7 @@ import java.util.function.Predicate; import static java.util.Collections.singleton; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.instanceOf; @@ -67,7 +67,7 @@ public class RankEvalResponseTests extends ESTestCase { private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] { new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)), new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT), - new SearchParseException(new TestSearchContext(null), "Parse failure", new XContentLocation(12, 98)), + new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)), new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 12f45f71138..ec3469db7e4 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.transport.TcpTransport; @@ -283,6 +284,10 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public static ElasticsearchException readException(StreamInput input, int id) throws IOException { CheckedFunction elasticsearchException = ID_TO_SUPPLIER.get(id); if (elasticsearchException == null) { + if (id == 127 && input.getVersion().before(Version.V_7_5_0)) { + // was SearchContextException + return new SearchException(input); + } throw new IllegalStateException("unknown exception for id: " + id); } return elasticsearchException.apply(input); @@ -965,8 +970,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte TcpTransport.HttpRequestOnTransportException::new, 125, UNKNOWN_VERSION_ADDED), MAPPER_PARSING_EXCEPTION(org.elasticsearch.index.mapper.MapperParsingException.class, org.elasticsearch.index.mapper.MapperParsingException::new, 126, UNKNOWN_VERSION_ADDED), - SEARCH_CONTEXT_EXCEPTION(org.elasticsearch.search.SearchContextException.class, - org.elasticsearch.search.SearchContextException::new, 127, UNKNOWN_VERSION_ADDED), + // 127 used to be org.elasticsearch.search.SearchContextException SEARCH_SOURCE_BUILDER_EXCEPTION(org.elasticsearch.search.builder.SearchSourceBuilderException.class, org.elasticsearch.search.builder.SearchSourceBuilderException::new, 128, UNKNOWN_VERSION_ADDED), // 129 was 
EngineClosedException diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index b6eea750748..088d2b97328 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -472,4 +472,12 @@ public class QueryShardContext extends QueryRewriteContext { public BigArrays bigArrays() { return bigArrays; } + + public SimilarityService getSimilarityService() { + return similarityService; + } + + public BitsetFilterCache getBitsetFilterCache() { + return bitsetFilterCache; + } } diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index c6a572efcb7..95f8a05b482 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -263,7 +263,7 @@ final class DefaultSearchContext extends SearchContext { try { this.query = searcher.rewrite(query); } catch (IOException e) { - throw new QueryPhaseExecutionException(this, "Failed to rewrite main query", e); + throw new QueryPhaseExecutionException(shardTarget, "Failed to rewrite main query", e); } } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchContextException.java b/server/src/main/java/org/elasticsearch/search/SearchContextException.java deleted file mode 100644 index 8f1ebb80139..00000000000 --- a/server/src/main/java/org/elasticsearch/search/SearchContextException.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.internal.SearchContext; - -import java.io.IOException; - -public class SearchContextException extends SearchException { - - public SearchContextException(SearchContext context, String msg) { - super(context.shardTarget(), msg); - } - - public SearchContextException(SearchContext context, String msg, Throwable t) { - super(context.shardTarget(), msg, t); - } - - public SearchContextException(StreamInput in) throws IOException { - super(in); - } - -} diff --git a/server/src/main/java/org/elasticsearch/search/SearchParseException.java b/server/src/main/java/org/elasticsearch/search/SearchParseException.java index f8ae3c7f674..a876a512202 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchParseException.java +++ b/server/src/main/java/org/elasticsearch/search/SearchParseException.java @@ -25,22 +25,21 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -public class SearchParseException extends SearchContextException { +public class SearchParseException extends SearchException { public static final int UNKNOWN_POSITION = -1; private final int lineNumber; private final int columnNumber; - public SearchParseException(SearchContext context, String msg, @Nullable XContentLocation location) { - this(context, msg, location, null); + public SearchParseException(SearchShardTarget shardTarget, String msg, @Nullable XContentLocation location) { + this(shardTarget, msg, location, null); } - public SearchParseException(SearchContext context, String msg, @Nullable XContentLocation location, Throwable cause) { - super(context, msg, cause); + public SearchParseException(SearchShardTarget shardTarget, String msg, @Nullable XContentLocation location, Throwable cause) { + super(shardTarget, msg, cause); int lineNumber = UNKNOWN_POSITION; int columnNumber = UNKNOWN_POSITION; if (location != null) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 407228c0526..f3daa34eb4f 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -730,11 +730,12 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } } - private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchContextException { + private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchException { // nothing to parse... 
if (source == null) { return; } + SearchShardTarget shardTarget = context.shardTarget(); QueryShardContext queryShardContext = context.getQueryShardContext(); context.from(source.from()); context.size(source.size()); @@ -760,14 +761,14 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.sort(optionalSort.get()); } } catch (IOException e) { - throw new SearchContextException(context, "failed to create sort elements", e); + throw new SearchException(shardTarget, "failed to create sort elements", e); } } context.trackScores(source.trackScores()); if (source.trackTotalHitsUpTo() != null && source.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE && context.scrollContext() != null) { - throw new SearchContextException(context, "disabling [track_total_hits] is not allowed in a scroll context"); + throw new SearchException(shardTarget, "disabling [track_total_hits] is not allowed in a scroll context"); } if (source.trackTotalHitsUpTo() != null) { context.trackTotalHitsUpTo(source.trackTotalHitsUpTo()); @@ -794,7 +795,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv try { context.suggest(source.suggest().build(queryShardContext)); } catch (IOException e) { - throw new SearchContextException(context, "failed to create SuggestionSearchContext", e); + throw new SearchException(shardTarget, "failed to create SuggestionSearchContext", e); } } if (source.rescores() != null) { @@ -803,7 +804,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv context.addRescore(rescore.buildContext(queryShardContext)); } } catch (IOException e) { - throw new SearchContextException(context, "failed to create RescoreSearchContext", e); + throw new SearchException(shardTarget, "failed to create RescoreSearchContext", e); } } if (source.explain() != null) { @@ -834,7 +835,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv try { context.highlight(highlightBuilder.build(queryShardContext)); } catch (IOException e) { - throw new SearchContextException(context, "failed to create SearchContextHighlighter", e); + throw new SearchException(shardTarget, "failed to create SearchContextHighlighter", e); } } if (source.scriptFields() != null && source.size() != 0) { @@ -869,10 +870,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } if (source.searchAfter() != null && source.searchAfter().length > 0) { if (context.scrollContext() != null) { - throw new SearchContextException(context, "`search_after` cannot be used in a scroll context."); + throw new SearchException(shardTarget, "`search_after` cannot be used in a scroll context."); } if (context.from() > 0) { - throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used."); + throw new SearchException(shardTarget, "`from` parameter must be set to 0 when `search_after` is used."); } FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter()); context.searchAfter(fieldDoc); @@ -880,7 +881,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.slice() != null) { if (context.scrollContext() == null) { - throw new SearchContextException(context, "`slice` cannot be used outside of a scroll context"); + throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context"); } context.sliceBuilder(source.slice()); } @@ -888,10 +889,10 @@ public class 
SearchService extends AbstractLifecycleComponent implements IndexEv if (source.storedFields() != null) { if (source.storedFields().fetchFields() == false) { if (context.version()) { - throw new SearchContextException(context, "`stored_fields` cannot be disabled if version is requested"); + throw new SearchException(shardTarget, "`stored_fields` cannot be disabled if version is requested"); } if (context.sourceRequested()) { - throw new SearchContextException(context, "`stored_fields` cannot be disabled if _source is requested"); + throw new SearchException(shardTarget, "`stored_fields` cannot be disabled if _source is requested"); } } context.storedFieldsContext(source.storedFields()); @@ -899,13 +900,13 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.collapse() != null) { if (context.scrollContext() != null) { - throw new SearchContextException(context, "cannot use `collapse` in a scroll context"); + throw new SearchException(shardTarget, "cannot use `collapse` in a scroll context"); } if (context.searchAfter() != null) { - throw new SearchContextException(context, "cannot use `collapse` in conjunction with `search_after`"); + throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `search_after`"); } if (context.rescore() != null && context.rescore().isEmpty() == false) { - throw new SearchContextException(context, "cannot use `collapse` in conjunction with `rescore`"); + throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `rescore`"); } final CollapseContext collapseContext = source.collapse().build(queryShardContext); context.collapse(collapseContext); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 851997ab468..c150f99cfe1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -116,7 +116,7 @@ public class AggregationPhase implements SearchPhase { globalsCollector.preCollection(); context.searcher().search(query, collector); } catch (Exception e) { - throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e); + throw new QueryPhaseExecutionException(context.shardTarget(), "Failed to execute global aggregators", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index ec276b7e01b..ea897005217 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext.Lifetime; @@ -76,11 +77,11 @@ public abstract class AggregatorBase extends Aggregator { assert factories != null : "sub-factories provided to BucketAggregator 
must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(context, this); context.addReleasable(this, Lifetime.PHASE); + final SearchShardTarget shardTarget = context.shardTarget(); // Register a safeguard to highlight any invalid construction logic (call to this constructor without subsequent preCollection call) collectableSubAggregators = new BucketCollector() { void badState(){ - throw new QueryPhaseExecutionException(AggregatorBase.this.context, - "preCollection not called on new Aggregator before use", null); + throw new QueryPhaseExecutionException(shardTarget, "preCollection not called on new Aggregator before use", null); } @Override public LeafBucketCollector getLeafCollector(LeafReaderContext reader) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java index 2c4d73fc752..fa5ecf29654 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java @@ -134,7 +134,7 @@ class ScriptedMetricAggregatorFactory extends AggregatorFactory { || original instanceof Boolean) { clone = original; } else { - throw new SearchParseException(context, + throw new SearchParseException(context.shardTarget(), "Can only clone primitives, String, ArrayList, and HashMap. Found: " + original.getClass().getCanonicalName(), null); } return clone; diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 3cbc47173c6..35df8e322ad 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -95,7 +95,7 @@ public class DfsPhase implements SearchPhase { .fieldStatistics(fieldStatistics) .maxDoc(context.searcher().getIndexReader().maxDoc()); } catch (Exception e) { - throw new DfsPhaseExecutionException(context, "Exception during dfs phase", e); + throw new DfsPhaseExecutionException(context.shardTarget(), "Exception during dfs phase", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java index f493bb4d052..2a9bd4a8f11 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java @@ -20,19 +20,19 @@ package org.elasticsearch.search.dfs; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class DfsPhaseExecutionException extends SearchContextException { +public class DfsPhaseExecutionException extends SearchException { - public DfsPhaseExecutionException(SearchContext context, String msg, Throwable t) { - super(context, "Dfs Failed [" + msg + "]", t); + public DfsPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable t) { + super(shardTarget, "Dfs Failed [" + msg + "]", t); } - public DfsPhaseExecutionException(SearchContext context, 
String msg) { - super(context, "Dfs Failed [" + msg + "]"); + public DfsPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, "Dfs Failed [" + msg + "]"); } public DfsPhaseExecutionException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index ab2f864bfce..0b0c740f5c4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhase; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase; @@ -229,7 +230,7 @@ public class FetchPhase implements SearchPhase { int subDocId, Map> storedToRequestedFields, LeafReaderContext subReaderContext) { - loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); + loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); if (fieldsVisitor.fields().isEmpty()) { @@ -266,7 +267,7 @@ public class FetchPhase implements SearchPhase { final boolean needSource = context.sourceRequested() || context.highlight() != null; if (needSource || (context instanceof InnerHitsContext.InnerHitSubContext == false)) { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId); + loadStoredFields(context.shardTarget(), subReaderContext, rootFieldsVisitor, rootSubDocId); rootFieldsVisitor.postProcess(context.mapperService()); uid = rootFieldsVisitor.uid(); source = rootFieldsVisitor.source(); @@ -419,12 +420,12 @@ public class FetchPhase implements SearchPhase { return nestedIdentity; } - private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { + private void loadStoredFields(SearchShardTarget shardTarget, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { fieldVisitor.reset(); try { readerContext.reader().document(docId, fieldVisitor); } catch (IOException e) { - throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e); + throw new FetchPhaseExecutionException(shardTarget, "Failed to fetch doc id [" + docId + "]", e); } } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java index e3fb542134e..7bf8b878c0d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java @@ -20,19 +20,19 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class FetchPhaseExecutionException 
extends SearchContextException { +public class FetchPhaseExecutionException extends SearchException { - public FetchPhaseExecutionException(SearchContext context, String msg, Throwable t) { - super(context, "Fetch Failed [" + msg + "]", t); + public FetchPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable t) { + super(shardTarget, "Fetch Failed [" + msg + "]", t); } - public FetchPhaseExecutionException(SearchContext context, String msg) { - super(context, "Fetch Failed [" + msg + "]"); + public FetchPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, "Fetch Failed [" + msg + "]"); } public FetchPhaseExecutionException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java index c177cc8c3ae..a869c4fefbc 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java @@ -46,7 +46,7 @@ public final class ExplainFetchSubPhase implements FetchSubPhase { // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { - throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().getType() + "#" + throw new FetchPhaseExecutionException(context.shardTarget(), "Failed to explain doc [" + hitContext.hit().getType() + "#" + hitContext.hit().getId() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 358b652fbdf..d9cc0d0be06 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -203,7 +203,7 @@ public class FastVectorHighlighter implements Highlighter { return null; } catch (Exception e) { - throw new FetchPhaseExecutionException(context, + throw new FetchPhaseExecutionException(context.shardTarget(), "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 6ad155104a4..09ec505ca1b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -139,7 +139,8 @@ public class PlainHighlighter implements Highlighter { // the plain highlighter will parse the source and try to analyze it. 
return null; } else { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + throw new FetchPhaseExecutionException(context.shardTarget(), + "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } if (field.fieldOptions().scoreOrdered()) { @@ -178,7 +179,8 @@ public class PlainHighlighter implements Highlighter { try { end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); } catch (Exception e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + throw new FetchPhaseExecutionException(context.shardTarget(), + "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } if (end > 0) { return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index b806fb9cd31..67db3f287b3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -123,7 +123,7 @@ public class UnifiedHighlighter implements Highlighter { } } } catch (IOException e) { - throw new FetchPhaseExecutionException(context, + throw new FetchPhaseExecutionException(context.shardTarget(), "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 64621277f6e..7f3a7a5b1b5 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -275,7 +275,7 @@ public class QueryPhase implements SearchPhase { if (searchContext.request().allowPartialSearchResults() == false) { // Can't rethrow TimeExceededException because not serializable - throw new QueryPhaseExecutionException(searchContext, "Time exceeded"); + throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded"); } queryResult.searchTimedOut(true); } finally { @@ -302,7 +302,7 @@ public class QueryPhase implements SearchPhase { } return topDocsFactory.shouldRescore(); } catch (Exception e) { - throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); + throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Failed to execute main query", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java index 94d259ef525..01a0bcde4ce 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java @@ -20,22 +20,22 @@ package org.elasticsearch.search.query; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class 
QueryPhaseExecutionException extends SearchContextException { +public class QueryPhaseExecutionException extends SearchException { - public QueryPhaseExecutionException(SearchContext context, String msg, Throwable cause) { - super(context, "Query Failed [" + msg + "]", cause); + public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable cause) { + super(shardTarget, "Query Failed [" + msg + "]", cause); } public QueryPhaseExecutionException(StreamInput in) throws IOException { super(in); } - public QueryPhaseExecutionException(SearchContext context, String msg) { - super(context, msg); + public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, msg); } } diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 2706e14a361..0f9fba89118 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -57,7 +57,6 @@ import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.transport.RemoteTransportException; import java.io.EOFException; @@ -73,6 +72,7 @@ import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItems; @@ -300,7 +300,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { "\"caused_by\":{\"type\":\"illegal_argument_exception\",\"reason\":\"foo\"}}"); } { - ElasticsearchException e = new SearchParseException(new TestSearchContext(null), "foo", new XContentLocation(1,0)); + ElasticsearchException e = new SearchParseException(SHARD_TARGET, "foo", new XContentLocation(1,0)); assertExceptionAsJson(e, "{\"type\":\"search_parse_exception\",\"reason\":\"foo\",\"line\":1,\"col\":0}"); } { @@ -920,7 +920,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { expected = new ElasticsearchException("Elasticsearch exception [type=parsing_exception, reason=Unknown identifier]"); break; case 2: - actual = new SearchParseException(new TestSearchContext(null), "Parse failure", new XContentLocation(12, 98)); + actual = new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)); expected = new ElasticsearchException("Elasticsearch exception [type=search_parse_exception, reason=Parse failure]"); break; case 3: diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 4846a75b9ba..0ef7a168f1c 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -79,13 +79,11 @@ import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import 
org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.ActionTransportException; @@ -121,6 +119,7 @@ import static java.lang.reflect.Modifier.isInterface; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class ExceptionSerializationTests extends ESTestCase { @@ -388,12 +387,10 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testSearchParseException() throws IOException { - SearchContext ctx = new TestSearchContext(null); - SearchParseException ex = serialize(new SearchParseException(ctx, "foo", new XContentLocation(66, 666))); + SearchParseException ex = serialize(new SearchParseException(SHARD_TARGET, "foo", new XContentLocation(66, 666))); assertEquals("foo", ex.getMessage()); assertEquals(66, ex.getLineNumber()); assertEquals(666, ex.getColumnNumber()); - assertEquals(ctx.shardTarget(), ex.shard()); } public void testIllegalIndexShardStateException() throws IOException { @@ -790,7 +787,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(124, null); ids.put(125, TcpTransport.HttpRequestOnTransportException.class); ids.put(126, org.elasticsearch.index.mapper.MapperParsingException.class); - ids.put(127, org.elasticsearch.search.SearchContextException.class); + ids.put(127, null); // was org.elasticsearch.search.SearchContextException.class ids.put(128, org.elasticsearch.search.builder.SearchSourceBuilderException.class); ids.put(129, null); // was org.elasticsearch.index.engine.EngineClosedException.class ids.put(130, org.elasticsearch.action.NoShardAvailableActionException.class); diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 806c3dfca67..5a0128bd1cd 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -67,7 +67,7 @@ public class SearchAfterIT extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertTrue(e.shardFailures().length > 0); for (ShardSearchFailure failure : e.shardFailures()) { - assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchException.class)); 
assertThat(failure.getCause().getMessage(), Matchers.equalTo("`search_after` cannot be used in a scroll context.")); } } @@ -83,7 +83,7 @@ public class SearchAfterIT extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertTrue(e.shardFailures().length > 0); for (ShardSearchFailure failure : e.shardFailures()) { - assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchException.class)); assertThat(failure.getCause().getMessage(), Matchers.equalTo("`from` parameter must be set to 0 when `search_after` is used.")); } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index 568c4c596b4..8e7d95e69a7 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -205,7 +205,7 @@ public class SearchSliceIT extends ESIntegTestCase { .slice(new SliceBuilder("invalid_random_int", 0, 10)) .get()); Throwable rootCause = findRootCause(exc); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`slice` cannot be used outside of a scroll context")); } diff --git a/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 545968140f6..2bb61edeed4 100644 --- a/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.ESIntegTestCase; @@ -128,18 +128,18 @@ public class MetadataFetchingIT extends ESIntegTestCase { { SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test").setFetchSource(true).storedFields("_none_").get()); - Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class); + Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`stored_fields` cannot be disabled if _source is requested")); } { SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () 
-> client().prepareSearch("test").storedFields("_none_").setVersion(true).get()); - Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class); + Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`stored_fields` cannot be disabled if version is requested")); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 97b7de893ba..6cd451d6e40 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -21,6 +21,7 @@ package org.elasticsearch.test; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.unit.TimeValue; @@ -35,6 +36,7 @@ import org.elasticsearch.index.query.InnerHitContextBuilder; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.SearchShardTarget; @@ -64,6 +66,8 @@ import java.util.List; import java.util.Map; public class TestSearchContext extends SearchContext { + public static final SearchShardTarget SHARD_TARGET = + new SearchShardTarget("test", new ShardId("test", "test", 0), null, OriginalIndices.NONE); final BigArrays bigArrays; final IndexService indexService; From 0765bd4bf7d5d1f4a1fe5497d72069fe4b848f68 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 26 Sep 2019 15:26:06 +0300 Subject: [PATCH 79/94] [7.x][ML] Ensure data frame analytics task is only marked completed once (#47119) (#47157) Closes #46907 --- .../ml/dataframe/DataFrameAnalyticsTask.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java index 2a172fd6d9c..67b5e988826 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java @@ -60,6 +60,7 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S private volatile Long reindexingTaskId; private volatile boolean isReindexingFinished; private volatile boolean isStopping; + private volatile boolean isMarkAsCompletedCalled; private final ProgressTracker progressTracker = new ProgressTracker(); public DataFrameAnalyticsTask(long id, String type, String action, TaskId parentTask, Map headers, @@ -102,10 +103,17 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S public void markAsCompleted() { // It is possible that the stop API has been called in the meantime and that // may also cause this method to be 
called. We check whether we have already - // been marked completed to avoid doing it twice. - if (isCompleted() == false) { - persistProgress(() -> super.markAsCompleted()); + // been marked completed to avoid doing it twice. We need to capture that + // locally instead of relying on isCompleted() because of the asynchronous + // persistence of progress. + synchronized (this) { + if (isMarkAsCompletedCalled) { + return; + } + isMarkAsCompletedCalled = true; } + + persistProgress(() -> super.markAsCompleted()); } @@ -224,6 +232,7 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S } private void persistProgress(Runnable runnable) { + LOGGER.debug("[{}] Persisting progress", taskParams.getId()); GetDataFrameAnalyticsStatsAction.Request getStatsRequest = new GetDataFrameAnalyticsStatsAction.Request(taskParams.getId()); executeAsyncWithOrigin(client, ML_ORIGIN, GetDataFrameAnalyticsStatsAction.INSTANCE, getStatsRequest, ActionListener.wrap( statsResponse -> { From 77cc6d5bad06ffb53b0422806c4019cd6ff615c2 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 26 Sep 2019 13:19:10 +0100 Subject: [PATCH 80/94] [TEST] Work around _cat/indices bug with security enabled (#47160) When the ML native multi-node tests use _cat/indices/_all and the request goes to a non-master node, _all is translated to a list of concrete indices by the authz layer on the coordinating node before the request is forwarded to the master node. Then it is possible for the master node to return an index_not_found_exception if one of the concrete indices that was expanded on the coordinating node has been deleted in the meantime. (#47159 has been opened to track the underlying problem.) It has been observed that the index that gets deleted when the problem affects the ML native multi-node tests is always the ML notifications index. The tests that fail are only interested in the presence or absence of ML results indices. Therefore the workaround is to only _cat indices that match the ML results index pattern.
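To make the fix concrete, the sketch below issues the narrowed request outside the test framework. It is illustrative only: the localhost endpoint stands in for the test cluster, and the hard-coded .ml-anomalies-* pattern corresponds to the AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX constant used in the diff.

    import java.io.IOException;

    import org.apache.http.HttpHost;
    import org.apache.http.util.EntityUtils;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class CatMlResultsIndices {
        public static void main(String[] args) throws IOException {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                // The wildcard can only ever expand to ML results indices, so a
                // notifications index deleted mid-flight can no longer surface as
                // an index_not_found_exception from the master node.
                Request request = new Request("GET", "/_cat/indices/.ml-anomalies-*");
                Response response = client.performRequest(request);
                System.out.println(EntityUtils.toString(response.getEntity()));
            }
        }
    }

The diff below applies the same narrowing to every _cat/indices call in MlJobIT and removes the now-unnecessary @AwaitsFix annotations.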
Fixes #45652 --- .../xpack/ml/integration/MlJobIT.java | 40 +++++++++++++------ 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 1c7a367239e..cfd760df926 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -208,7 +208,9 @@ public class MlJobIT extends ESRestTestCase { } }); - String responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); @@ -272,7 +274,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))); //job2 still exists - responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); client().performRequest(new Request("POST", "/_refresh")); @@ -287,7 +290,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); client().performRequest(new Request("POST", "/_refresh")); - responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName))); } @@ -394,19 +398,21 @@ public class MlJobIT extends ESRestTestCase { "avoid the clash by assigning a dedicated results index")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/45652") public void testDeleteJob() throws Exception { String jobId = "delete-job-job"; String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete =
EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); // check that the index still exists (it's shared by default) - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); waitUntilIndexIsEmpty(indexName); @@ -465,13 +471,14 @@ public class MlJobIT extends ESRestTestCase { assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(404)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/45652") public void testDeleteJobAsync() throws Exception { String jobId = "delete-job-async-job"; String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); Response response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId @@ -483,7 +490,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(EntityUtils.toString(taskResponse.getEntity()), containsString("\"acknowledged\":true")); // check that the index still exists (it's shared by default) - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); waitUntilIndexIsEmpty(indexName); @@ -518,7 +526,9 @@ public class MlJobIT extends ESRestTestCase { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); // Manually delete the index so that we can test that deletion proceeds @@ -528,7 +538,8 @@ public class MlJobIT extends ESRestTestCase { client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); // check index was deleted - String indicesAfterDelete =
EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, not(containsString(aliasName))); assertThat(indicesAfterDelete, not(containsString(indexName))); @@ -598,7 +609,9 @@ public class MlJobIT extends ESRestTestCase { "}"); client().performRequest(extraIndex2); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); assertThat(indicesBeforeDelete, containsString(indexName + "-001")); assertThat(indicesBeforeDelete, containsString(indexName + "-002")); @@ -637,7 +650,8 @@ public class MlJobIT extends ESRestTestCase { client().performRequest(new Request("POST", "/_refresh")); // check that the indices still exist but are empty - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); assertThat(indicesAfterDelete, containsString(indexName + "-001")); assertThat(indicesAfterDelete, containsString(indexName + "-002")); From 3b626c2d5649fc66431a61e49fc617f15c01b421 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 26 Sep 2019 08:51:12 -0400 Subject: [PATCH 81/94] [DOCS] Reformat get pipeline API (#47131) (#47163) --- .../ingest/apis/get-pipeline.asciidoc | 182 ++++++++++-------- .../ingest/apis/put-pipeline.asciidoc | 45 +++++ docs/reference/rest-api/common-parms.asciidoc | 6 + 3 files changed, 151 insertions(+), 82 deletions(-) diff --git a/docs/reference/ingest/apis/get-pipeline.asciidoc b/docs/reference/ingest/apis/get-pipeline.asciidoc index 887ae870f52..b046b788668 100644 --- a/docs/reference/ingest/apis/get-pipeline.asciidoc +++ b/docs/reference/ingest/apis/get-pipeline.asciidoc @@ -1,70 +1,16 @@ [[get-pipeline-api]] -=== Get Pipeline API +=== Get pipeline API +++++ +Get pipeline +++++ -The get pipeline API returns pipelines based on ID. This API always returns a local reference of the pipeline. - -////////////////////////// +Returns information about one or more ingest pipelines. +This API returns a local reference of the pipeline.
+//// [source,console] -------------------------------------------------- -PUT _ingest/pipeline/my-pipeline-id -{ - "description" : "describe pipeline", - "processors" : [ - { - "set" : { - "field": "foo", - "value": "bar" - } - } - ] -} -------------------------------------------------- - -////////////////////////// - -[source,console] -------------------------------------------------- -GET _ingest/pipeline/my-pipeline-id -------------------------------------------------- -// TEST[continued] - -Example response: - -[source,console-result] -------------------------------------------------- -{ - "my-pipeline-id" : { - "description" : "describe pipeline", - "processors" : [ - { - "set" : { - "field" : "foo", - "value" : "bar" - } - } - ] - } -} -------------------------------------------------- - -For each returned pipeline, the source and the version are returned. -The version is useful for knowing which version of the pipeline the node has. -You can specify multiple IDs to return more than one pipeline. Wildcards are also supported. - -[float] -[[versioning-pipelines]] -==== Pipeline Versioning - -Pipelines can optionally add a `version` number, which can be any integer value, -in order to simplify pipeline management by external systems. The `version` -field is completely optional and it is meant solely for external management of -pipelines. To unset a `version`, simply replace the pipeline without specifying -one. - -[source,console] -------------------------------------------------- -PUT _ingest/pipeline/my-pipeline-id +---- +PUT /_ingest/pipeline/my-pipeline-id { "description" : "describe pipeline", "version" : 123, @@ -77,42 +23,114 @@ PUT _ingest/pipeline/my-pipeline-id } ] } -------------------------------------------------- - -To check for the `version`, you can -<> -using `filter_path` to limit the response to just the `version`: +---- +//// [source,console] -------------------------------------------------- -GET /_ingest/pipeline/my-pipeline-id -------------------------------------------------- +---- +GET /_ingest/pipeline/my-pipeline-id +---- // TEST[continued] -This should give a small response that makes it both easy and inexpensive to parse: + + +[[get-pipeline-api-request]] +==== {api-request-title} + +`GET /_ingest/pipeline/` + +`GET /_ingest/pipeline` + + +[[get-pipeline-api-path-params]] +==== {api-path-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=path-pipeline] + + + +[[get-pipeline-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + + +[[get-pipeline-api-api-example]] +==== {api-examples-title} + + +[[get-pipeline-api-specific-ex]] +===== Get information for a specific ingest pipeline + +[source,console] +---- +GET /_ingest/pipeline/my-pipeline-id +---- +// TEST[continued] + +The API returns the following response: [source,console-result] -------------------------------------------------- +---- +{ + "my-pipeline-id" : { + "description" : "describe pipeline", + "version" : 123, + "processors" : [ + { + "set" : { + "field" : "foo", + "value" : "bar" + } + } + ] + } +} +---- + + +[[get-pipeline-api-version-ex]] +===== Get the version of an ingest pipeline + +When you create or update an ingest pipeline, +you can specify an optional `version` parameter. +The version is useful for managing changes to a pipeline +and viewing the current pipeline for an ingest node.
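Outside the docs' console examples, the same lifecycle can be exercised from Java with the low-level REST client. The following is a minimal illustrative sketch, not part of the documented API surface; it assumes a cluster reachable on localhost:9200 and reuses the pipeline body from the example above.

[source,java]
----
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class PipelineVersionCheck {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Register a pipeline with an explicit version for external tracking.
            Request put = new Request("PUT", "/_ingest/pipeline/my-pipeline-id");
            put.setJsonEntity("{\"description\":\"describe pipeline\",\"version\":123,"
                + "\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}");
            client.performRequest(put);

            // Read it back; the version is returned with the pipeline definition,
            // as in the filter_path example that follows.
            Request get = new Request("GET", "/_ingest/pipeline/my-pipeline-id");
            System.out.println(EntityUtils.toString(client.performRequest(get).getEntity()));
        }
    }
}
----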
+ + +To check the pipeline version, +use the `filter_path` query parameter +to <> +to only the version. + +[source,console] +---- +GET /_ingest/pipeline/my-pipeline-id?filter_path=*.version +---- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +---- { "my-pipeline-id" : { "version" : 123 } } --------------------------------------------------- - -////////////////////////// +---- +//// [source,console] --------------------------------------------------- +---- DELETE /_ingest/pipeline/my-pipeline-id --------------------------------------------------- +---- // TEST[continued] [source,console-result] --------------------------------------------------- +---- { "acknowledged": true } --------------------------------------------------- - -////////////////////////// +---- +//// diff --git a/docs/reference/ingest/apis/put-pipeline.asciidoc b/docs/reference/ingest/apis/put-pipeline.asciidoc index fe7b388b9ab..fd42b3c97c8 100644 --- a/docs/reference/ingest/apis/put-pipeline.asciidoc +++ b/docs/reference/ingest/apis/put-pipeline.asciidoc @@ -19,6 +19,51 @@ PUT _ingest/pipeline/my-pipeline-id } -------------------------------------------------- +[float] +[[versioning-pipelines]] +==== Pipeline versioning + +Pipelines can optionally add a `version` number, which can be any integer value, +in order to simplify pipeline management by external systems. The `version` +field is completely optional and it is meant solely for external management of +pipelines. + +[source,console] +-------------------------------------------------- +PUT /_ingest/pipeline/my-pipeline-id +{ + "description" : "describe pipeline", + "version" : 123, + "processors" : [ + { + "set" : { + "field": "foo", + "value": "bar" + } + } + ] +} +-------------------------------------------------- + +To unset a `version`, simply replace the pipeline without specifying +one. + +[source,console] +-------------------------------------------------- +PUT /_ingest/pipeline/my-pipeline-id +{ + "description" : "describe pipeline", + "processors" : [ + { + "set" : { + "field": "foo", + "value": "bar" + } + } + ] +} +-------------------------------------------------- + ////////////////////////// [source,console] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 4b76893b730..d797eb7800a 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -401,6 +401,12 @@ tag::pipeline[] (Optional, string) ID of the pipeline to use to preprocess incoming documents. end::pipeline[] +tag::path-pipeline[] +``:: +(Optional, string) Comma-separated list or wildcard expression of pipeline IDs +used to limit the request. 
+end::path-pipeline[] + tag::preference[] `preference`:: (Optional, string) Specifies the node or shard the operation should be From 0c575dc1e8d8dcff28548869f39241b7c5349507 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 26 Sep 2019 08:56:24 -0400 Subject: [PATCH 82/94] [DOCS] Correct link to `index.store.preload` setting (#47145) --- docs/reference/how-to/search-speed.asciidoc | 4 ++-- docs/reference/index-modules/store.asciidoc | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index 0f3e112c1dc..91337b0b0a1 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -336,8 +336,8 @@ If the machine running Elasticsearch is restarted, the filesystem cache will be empty, so it will take some time before the operating system loads hot regions of the index into memory so that search operations are fast. You can explicitly tell the operating system which files should be loaded into memory eagerly -depending on the file extension using the <> -setting. +depending on the file extension using the +<> setting. WARNING: Loading data into the filesystem cache eagerly on too many indices or too many files will make search _slower_ if the filesystem cache is not large diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index 9bd25968e22..fd86b29df67 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -83,7 +83,8 @@ setting is useful, for example, if you are in an environment where you can not control the ability to create a lot of memory maps so you need disable the ability to use memory-mapping. -=== Pre-loading data into the file system cache +[[preload-data-to-file-system-cache]] +=== Preloading data into the file system cache NOTE: This is an expert setting, the details of which may change in the future. From ae202fda214c145c75990a85baf586be05a79e10 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Wed, 25 Sep 2019 13:43:05 -0400 Subject: [PATCH 83/94] SQL: Add support for shape type (#46464) Enables support for the Cartesian geometry `shape` type. We still need to decide how to handle the distance function since it is currently using the haversine distance formula and returns results in meters, which doesn't make any sense for Cartesian geometries.
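For context on that open question, the distance function computes the spherical great-circle distance. A sketch of the standard haversine formula follows; the mean Earth radius R of roughly 6,371 km is the usual assumption behind the meters-based result, not something defined in this patch:

    % Haversine distance between points (phi_1, lambda_1) and (phi_2, lambda_2)
    d = 2R \arcsin\left( \sqrt{ \sin^2\left( \frac{\varphi_2 - \varphi_1}{2} \right)
        + \cos\varphi_1 \cos\varphi_2 \sin^2\left( \frac{\lambda_2 - \lambda_1}{2} \right) } \right)

Every term presumes angular coordinates on a sphere, which is why a meters-based result is meaningless for planar `shape` data, where the Euclidean distance \sqrt{(x_2 - x_1)^2 + (y_2 - y_1)^2} would be the natural analogue.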
Closes #46412 Relates to #43644 --- docs/reference/sql/functions/geo.asciidoc | 9 ++-- .../sql/language/data-types.asciidoc | 1 + .../elasticsearch/xpack/sql/jdbc/EsType.java | 5 ++- .../xpack/sql/jdbc/TypeConverter.java | 1 + .../xpack/sql/jdbc/TypeUtils.java | 1 + .../xpack/sql/qa/geo/GeoDataLoader.java | 16 +++---- .../sql/qa/src/main/resources/geo/geo.csv | 32 +++++++------- .../src/main/resources/geo/geosql-bulk.json | 30 ++++++------- .../qa/src/main/resources/geo/geosql.csv-spec | 43 ++++++++++--------- .../sql/qa/src/main/resources/geo/geosql.json | 7 ++- .../src/main/resources/geo/setup_test_geo.sql | 1 + .../qa/src/main/resources/ogc/ogc.csv-spec | 16 +++---- .../single-node-only/command-sys-geo.csv-spec | 11 ++--- .../xpack/sql/analysis/analyzer/Verifier.java | 26 +++++++---- .../search/extractor/FieldHitExtractor.java | 15 +++++-- .../function/scalar/geo/GeoShape.java | 4 +- .../xpack/sql/type/DataType.java | 15 ++++--- .../extractor/FieldHitExtractorTests.java | 19 ++++---- .../scalar/geo/StWkttosqlProcessorTests.java | 4 +- .../logical/command/sys/SysTypesTests.java | 4 +- 20 files changed, 146 insertions(+), 114 deletions(-) diff --git a/docs/reference/sql/functions/geo.asciidoc b/docs/reference/sql/functions/geo.asciidoc index bb8680ac183..c90d833919e 100644 --- a/docs/reference/sql/functions/geo.asciidoc +++ b/docs/reference/sql/functions/geo.asciidoc @@ -5,14 +5,15 @@ beta[] -The geo functions work with geometries stored in `geo_point` and `geo_shape` fields, or returned by other geo functions. +The geo functions work with geometries stored in `geo_point`, `geo_shape` and `shape` fields, or returned by other geo functions. ==== Limitations -Both <> and <> types are represented in SQL as geometry and can be used -interchangeably with the following exceptions: +<>, <> and <> types are represented in SQL as +geometry and can be used interchangeably with the following exceptions: +* `geo_shape` and `shape` fields don't have doc values, therefore these fields cannot be used for filtering, grouping + or sorting.
* `geo_points` fields are indexed and have doc values by default, however only latitude and longitude are stored and indexed with some loss of precision from the original values (4.190951585769653E-8 for the latitude and diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index ee73a1eea7c..811bb1ac6a4 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -83,6 +83,7 @@ s|SQL precision | interval_minute_to_second | 23 | geo_point | 52 | geo_shape | 2,147,483,647 +| shape | 2,147,483,647 |=== diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java index 51a03dad70b..0b96204fd5d 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -11,7 +11,7 @@ import java.sql.SQLType; import java.sql.Types; public enum EsType implements SQLType { - + NULL( Types.NULL), UNSUPPORTED( Types.OTHER), BOOLEAN( Types.BOOLEAN), @@ -46,7 +46,8 @@ public enum EsType implements SQLType { INTERVAL_HOUR_TO_SECOND( ExtraTypes.INTERVAL_HOUR_SECOND), INTERVAL_MINUTE_TO_SECOND(ExtraTypes.INTERVAL_MINUTE_SECOND), GEO_POINT( ExtraTypes.GEOMETRY), - GEO_SHAPE( ExtraTypes.GEOMETRY); + GEO_SHAPE( ExtraTypes.GEOMETRY), + SHAPE( ExtraTypes.GEOMETRY); private final Integer type; diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index b788161cd4a..bf72b4454e4 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -248,6 +248,7 @@ final class TypeConverter { return Duration.parse(v.toString()); case GEO_POINT: case GEO_SHAPE: + case SHAPE: try { return WKT.fromWKT(v.toString()); } catch (IOException | ParseException ex) { diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java index 0f1df554d3f..511cf92be56 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java @@ -94,6 +94,7 @@ final class TypeUtils { types.put(EsType.INTERVAL_MINUTE_TO_SECOND, Duration.class); types.put(EsType.GEO_POINT, String.class); types.put(EsType.GEO_SHAPE, String.class); + types.put(EsType.SHAPE, String.class); TYPE_TO_CLASS = unmodifiableMap(types); diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java index 40e8f64be87..dfee527a331 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java @@ -73,25 +73,25 @@ public class GeoDataLoader { createString("name", createIndex); // Type specific - createIndex.startObject("shore").field("type", "geo_shape").endObject(); // lakes + createIndex.startObject("shore").field("type", "shape").endObject(); // lakes 
createString("aliases", createIndex); // road_segments createIndex.startObject("num_lanes").field("type", "integer").endObject(); // road_segments, divided_routes - createIndex.startObject("centerline").field("type", "geo_shape").endObject(); // road_segments, streams + createIndex.startObject("centerline").field("type", "shape").endObject(); // road_segments, streams - createIndex.startObject("centerlines").field("type", "geo_shape").endObject(); // divided_routes + createIndex.startObject("centerlines").field("type", "shape").endObject(); // divided_routes - createIndex.startObject("boundary").field("type", "geo_shape").endObject(); // forests, named_places + createIndex.startObject("boundary").field("type", "shape").endObject(); // forests, named_places - createIndex.startObject("position").field("type", "geo_shape").endObject(); // bridges, buildings + createIndex.startObject("position").field("type", "shape").endObject(); // bridges, buildings createString("address", createIndex); // buildings - createIndex.startObject("footprint").field("type", "geo_shape").endObject(); // buildings + createIndex.startObject("footprint").field("type", "shape").endObject(); // buildings createIndex.startObject("type").field("type", "keyword").endObject(); // ponds - createIndex.startObject("shores").field("type", "geo_shape").endObject(); // ponds + createIndex.startObject("shores").field("type", "shape").endObject(); // ponds - createIndex.startObject("neatline").field("type", "geo_shape").endObject(); // map_neatlines + createIndex.startObject("neatline").field("type", "shape").endObject(); // map_neatlines } createIndex.endObject(); diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv index d21ea71c5b9..3171f8adcda 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv @@ -1,16 +1,16 @@ -city,region,region_point,location,shape -Mountain View,Americas,POINT(-105.2551 54.5260),POINT (-122.083843 37.386483),POINT (-122.083843 37.386483) -Chicago,Americas,POINT(-105.2551 54.5260),POINT (-87.637874 41.888783),POINT (-87.637874 41.888783) -New York,Americas,POINT(-105.2551 54.5260),POINT (-73.990027 40.745171),POINT (-73.990027 40.745171) -San Francisco,Americas,POINT(-105.2551 54.5260),POINT (-122.394228 37.789541),POINT (-122.394228 37.789541) -Phoenix,Americas,POINT(-105.2551 54.5260),POINT (-111.973505 33.376242),POINT (-111.973505 33.376242) -Amsterdam,Europe,POINT(15.2551 54.5260),POINT (4.850312 52.347557),POINT (4.850312 52.347557) -Berlin,Europe,POINT(15.2551 54.5260),POINT (13.390889 52.486701),POINT (13.390889 52.486701) -Munich,Europe,POINT(15.2551 54.5260),POINT (11.537505 48.146321),POINT (11.537505 48.146321) -London,Europe,POINT(15.2551 54.5260),POINT (-0.121672 51.510871),POINT (-0.121672 51.510871) -Paris,Europe,POINT(15.2551 54.5260),POINT (2.351773 48.845538),POINT (2.351773 48.845538) -Singapore,Asia,POINT(100.6197 34.0479),POINT (103.855535 1.295868),POINT (103.855535 1.295868) -Hong Kong,Asia,POINT(100.6197 34.0479),POINT (114.183925 22.281397),POINT (114.183925 22.281397) -Seoul,Asia,POINT(100.6197 34.0479),POINT (127.060851 37.509132),POINT (127.060851 37.509132) -Tokyo,Asia,POINT(100.6197 34.0479),POINT (139.76402225 35.669616),POINT (139.76402225 35.669616) -Sydney,Asia,POINT(100.6197 34.0479),POINT (151.208629 -33.863385),POINT (151.208629 -33.863385) +city,region,region_point,location,geoshape,shape +Mountain View,Americas,POINT(-105.2551 
54.5260),POINT (-122.083843 37.386483),POINT (-122.083843 37.386483),POINT (-122.083843 37.386483) +Chicago,Americas,POINT(-105.2551 54.5260),POINT (-87.637874 41.888783),POINT (-87.637874 41.888783),POINT (-87.637874 41.888783) +New York,Americas,POINT(-105.2551 54.5260),POINT (-73.990027 40.745171),POINT (-73.990027 40.745171),POINT (-73.990027 40.745171) +San Francisco,Americas,POINT(-105.2551 54.5260),POINT (-122.394228 37.789541),POINT (-122.394228 37.789541),POINT (-122.394228 37.789541) +Phoenix,Americas,POINT(-105.2551 54.5260),POINT (-111.973505 33.376242),POINT (-111.973505 33.376242),POINT (-111.973505 33.376242) +Amsterdam,Europe,POINT(15.2551 54.5260),POINT (4.850312 52.347557),POINT (4.850312 52.347557),POINT (4.850312 52.347557) +Berlin,Europe,POINT(15.2551 54.5260),POINT (13.390889 52.486701),POINT (13.390889 52.486701),POINT (13.390889 52.486701) +Munich,Europe,POINT(15.2551 54.5260),POINT (11.537505 48.146321),POINT (11.537505 48.146321),POINT (11.537505 48.146321) +London,Europe,POINT(15.2551 54.5260),POINT (-0.121672 51.510871),POINT (-0.121672 51.510871),POINT (-0.121672 51.510871) +Paris,Europe,POINT(15.2551 54.5260),POINT (2.351773 48.845538),POINT (2.351773 48.845538),POINT (2.351773 48.845538) +Singapore,Asia,POINT(100.6197 34.0479),POINT (103.855535 1.295868),POINT (103.855535 1.295868),POINT (103.855535 1.295868) +Hong Kong,Asia,POINT(100.6197 34.0479),POINT (114.183925 22.281397),POINT (114.183925 22.281397),POINT (114.183925 22.281397) +Seoul,Asia,POINT(100.6197 34.0479),POINT (127.060851 37.509132),POINT (127.060851 37.509132),POINT (127.060851 37.509132) +Tokyo,Asia,POINT(100.6197 34.0479),POINT (139.76402225 35.669616),POINT (139.76402225 35.669616),POINT (139.76402225 35.669616) +Sydney,Asia,POINT(100.6197 34.0479),POINT (151.208629 -33.863385),POINT (151.208629 -33.863385),POINT (151.208629 -33.863385) diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json index 8c65742aac0..1c03fe77bc9 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json @@ -1,33 +1,33 @@ {"index":{"_id": "1"}} -{"region": "Americas", "city": "Mountain View", "location": {"lat":"37.386483", "lon":"-122.083843"}, "location_no_dv": {"lat":"37.386483", "lon":"-122.083843"}, "shape": "POINT (-122.083843 37.386483 30)", "region_point": "POINT(-105.2551 54.5260)"} +{"region": "Americas", "city": "Mountain View", "location": {"lat":"37.386483", "lon":"-122.083843"}, "location_no_dv": {"lat":"37.386483", "lon":"-122.083843"}, "geoshape": "POINT (-122.083843 37.386483 30)", "shape": "POINT (-122.083843 37.386483 30)", "region_point": "POINT(-105.2551 54.5260)"} {"index":{"_id": "2"}} -{"region": "Americas", "city": "Chicago", "location": [-87.637874, 41.888783], "location_no_dv": [-87.637874, 41.888783], "shape": {"type" : "point", "coordinates" : [-87.637874, 41.888783, 181]}, "region_point": "POINT(-105.2551 54.5260)"} +{"region": "Americas", "city": "Chicago", "location": [-87.637874, 41.888783], "location_no_dv": [-87.637874, 41.888783], "geoshape": {"type" : "point", "coordinates" : [-87.637874, 41.888783, 181]}, "shape": {"type" : "point", "coordinates" : [-87.637874, 41.888783, 181]}, "region_point": "POINT(-105.2551 54.5260)"} {"index":{"_id": "3"}} -{"region": "Americas", "city": "New York", "location": "40.745171,-73.990027", "location_no_dv": "40.745171,-73.990027", "shape": "POINT (-73.990027 40.745171 10)",
"region_point": "POINT(-105.2551 54.5260)"} +{"region": "Americas", "city": "New York", "location": "40.745171,-73.990027", "location_no_dv": "40.745171,-73.990027", "geoshape": "POINT (-73.990027 40.745171 10)", "shape": "POINT (-73.990027 40.745171 10)", "region_point": "POINT(-105.2551 54.5260)"} {"index":{"_id": "4"}} -{"region": "Americas", "city": "San Francisco", "location": "37.789541,-122.394228", "location_no_dv": "37.789541,-122.394228", "shape": "POINT (-122.394228 37.789541 16)", "region_point": "POINT(-105.2551 54.5260)"} +{"region": "Americas", "city": "San Francisco", "location": "37.789541,-122.394228", "location_no_dv": "37.789541,-122.394228", "geoshape": "POINT (-122.394228 37.789541 16)", "shape": "POINT (-122.394228 37.789541 16)", "region_point": "POINT(-105.2551 54.5260)"} {"index":{"_id": "5"}} -{"region": "Americas", "city": "Phoenix", "location": "33.376242,-111.973505", "location_no_dv": "33.376242,-111.973505", "shape": "POINT (-111.973505 33.376242 331)", "region_point": "POINT(-105.2551 54.5260)"} +{"region": "Americas", "city": "Phoenix", "location": "33.376242,-111.973505", "location_no_dv": "33.376242,-111.973505", "geoshape": "POINT (-111.973505 33.376242 331)", "shape": "POINT (-111.973505 33.376242 331)", "region_point": "POINT(-105.2551 54.5260)"} {"index":{"_id": "6"}} -{"region": "Europe", "city": "Amsterdam", "location": "52.347557,4.850312", "location_no_dv": "52.347557,4.850312", "shape": "POINT (4.850312 52.347557 2)", "region_point": "POINT(15.2551 54.5260)"} +{"region": "Europe", "city": "Amsterdam", "location": "52.347557,4.850312", "location_no_dv": "52.347557,4.850312", "geoshape": "POINT (4.850312 52.347557 2)", "shape": "POINT (4.850312 52.347557 2)", "region_point": "POINT(15.2551 54.5260)"} {"index":{"_id": "7"}} -{"region": "Europe", "city": "Berlin", "location": "52.486701,13.390889", "location_no_dv": "52.486701,13.390889", "shape": "POINT (13.390889 52.486701 34)", "region_point": "POINT(15.2551 54.5260)"} +{"region": "Europe", "city": "Berlin", "location": "52.486701,13.390889", "location_no_dv": "52.486701,13.390889", "geoshape": "POINT (13.390889 52.486701 34)", "shape": "POINT (13.390889 52.486701 34)", "region_point": "POINT(15.2551 54.5260)"} {"index":{"_id": "8"}} -{"region": "Europe", "city": "Munich", "location": "48.146321,11.537505", "location_no_dv": "48.146321,11.537505", "shape": "POINT (11.537505 48.146321 519)", "region_point": "POINT(15.2551 54.5260)"} +{"region": "Europe", "city": "Munich", "location": "48.146321,11.537505", "location_no_dv": "48.146321,11.537505", "geoshape": "POINT (11.537505 48.146321 519)", "shape": "POINT (11.537505 48.146321 519)", "region_point": "POINT(15.2551 54.5260)"} {"index":{"_id": "9"}} -{"region": "Europe", "city": "London", "location": "51.510871,-0.121672", "location_no_dv": "51.510871,-0.121672", "shape": "POINT (-0.121672 51.510871 11)", "region_point": "POINT(15.2551 54.5260)"} +{"region": "Europe", "city": "London", "location": "51.510871,-0.121672", "location_no_dv": "51.510871,-0.121672", "geoshape": "POINT (-0.121672 51.510871 11)", "shape": "POINT (-0.121672 51.510871 11)", "region_point": "POINT(15.2551 54.5260)"} {"index":{"_id": "10"}} -{"region": "Europe", "city": "Paris", "location": "48.845538,2.351773", "location_no_dv": "48.845538,2.351773", "shape": "POINT (2.351773 48.845538 35)", "region_point": "POINT(15.2551 54.5260)"} +{"region": "Europe", "city": "Paris", "location": "48.845538,2.351773", "location_no_dv": "48.845538,2.351773", "geoshape": "POINT (2.351773 
48.845538 35)", "shape": "POINT (2.351773 48.845538 35)", "region_point": "POINT(15.2551 54.5260)"} {"index":{"_id": "11"}} -{"region": "Asia", "city": "Singapore", "location": "1.295868,103.855535", "location_no_dv": "1.295868,103.855535", "shape": "POINT (103.855535 1.295868 15)", "region_point": "POINT(100.6197 34.0479)"} +{"region": "Asia", "city": "Singapore", "location": "1.295868,103.855535", "location_no_dv": "1.295868,103.855535", "geoshape": "POINT (103.855535 1.295868 15)", "shape": "POINT (103.855535 1.295868 15)", "region_point": "POINT(100.6197 34.0479)"} {"index":{"_id": "12"}} -{"region": "Asia", "city": "Hong Kong", "location": "22.281397,114.183925", "location_no_dv": "22.281397,114.183925", "shape": "POINT (114.183925 22.281397 552)", "region_point": "POINT(100.6197 34.0479)"} +{"region": "Asia", "city": "Hong Kong", "location": "22.281397,114.183925", "location_no_dv": "22.281397,114.183925", "geoshape": "POINT (114.183925 22.281397 552)", "shape": "POINT (114.183925 22.281397 552)", "region_point": "POINT(100.6197 34.0479)"} {"index":{"_id": "13"}} -{"region": "Asia", "city": "Seoul", "location": "37.509132,127.060851", "location_no_dv": "37.509132,127.060851", "shape": "POINT (127.060851 37.509132 38)", "region_point": "POINT(100.6197 34.0479)"} +{"region": "Asia", "city": "Seoul", "location": "37.509132,127.060851", "location_no_dv": "37.509132,127.060851", "geoshape": "POINT (127.060851 37.509132 38)", "shape": "POINT (127.060851 37.509132 38)", "region_point": "POINT(100.6197 34.0479)"} {"index":{"_id": "14"}} -{"region": "Asia", "city": "Tokyo", "location": "35.669616,139.76402225", "location_no_dv": "35.669616,139.76402225", "shape": "POINT (139.76402225 35.669616 40)", "region_point": "POINT(100.6197 34.0479)"} +{"region": "Asia", "city": "Tokyo", "location": "35.669616,139.76402225", "location_no_dv": "35.669616,139.76402225", "geoshape": "POINT (139.76402225 35.669616 40)", "shape": "POINT (139.76402225 35.669616 40)", "region_point": "POINT(100.6197 34.0479)"} {"index":{"_id": "15"}} -{"region": "Asia", "city": "Sydney", "location": "-33.863385,151.208629", "location_no_dv": "-33.863385,151.208629", "shape": "POINT (151.208629 -33.863385 100)", "region_point": "POINT(100.6197 34.0479)"} +{"region": "Asia", "city": "Sydney", "location": "-33.863385,151.208629", "location_no_dv": "-33.863385,151.208629", "geoshape": "POINT (151.208629 -33.863385 100)", "shape": "POINT (151.208629 -33.863385 100)", "region_point": "POINT(100.6197 34.0479)"} diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec index 8ee9a44adff..44c747f4bdc 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec @@ -16,42 +16,43 @@ DESCRIBE "geo"; column:s | type:s | mapping:s city | VARCHAR | keyword +geoshape | GEOMETRY | geo_shape location | GEOMETRY | geo_point location_no_dv | GEOMETRY | geo_point region | VARCHAR | keyword region_point | VARCHAR | keyword -shape | GEOMETRY | geo_shape +shape | GEOMETRY | shape ; // SELECT ALL // TODO: For now we just get geopoint formatted as is and we also need to convert it to STRING to work with CSV selectAllPointsAsStrings -SELECT city, CAST(location AS STRING) location, CAST(location_no_dv AS STRING) location_no_dv, CAST(shape AS STRING) shape, region FROM "geo" ORDER BY "city"; +SELECT city, CAST(location AS STRING) location, CAST(location_no_dv AS STRING) location_no_dv, CAST(geoshape AS 
STRING) geoshape, CAST(shape AS STRING) shape, region FROM "geo" ORDER BY "city"; - city:s | location:s | location_no_dv:s | shape:s | region:s -Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557) |point (4.850312 52.347557 2.0) |Europe -Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701) |point (13.390889 52.486701 34.0) |Europe -Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783) |point (-87.637874 41.888783 181.0) |Americas -Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397) |point (114.183925 22.281397 552.0) |Asia -London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871) |point (-0.121672 51.510871 11.0) |Europe -Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483) |point (-122.083843 37.386483 30.0) |Americas -Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321) |point (11.537505 48.146321 519.0) |Europe -New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171) |point (-73.990027 40.745171 10.0) |Americas -Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538) |point (2.351773 48.845538 35.0) |Europe -Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242) |point (-111.973505 33.376242 331.0) |Americas -San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541) |point (-122.394228 37.789541 16.0) |Americas -Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132) |point (127.060851 37.509132 38.0) |Asia -Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868) |point (103.855535 1.295868 15.0) |Asia -Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385) |point (151.208629 -33.863385 100.0) |Asia -Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616)|point (139.76402225 35.669616 40.0) |Asia + city:s | location:s | location_no_dv:s | geoshape:s | shape:s | region:s +Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557) |point (4.850312 52.347557 2.0) |point (4.850312 52.347557 2.0) |Europe +Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701) |point (13.390889 52.486701 34.0) |point (13.390889 52.486701 34.0) |Europe +Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783) |point (-87.637874 41.888783 181.0) |point (-87.637874 41.888783 181.0) |Americas +Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397) |point (114.183925 22.281397 552.0) |point (114.183925 22.281397 552.0) |Asia +London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871) |point (-0.121672 51.510871 11.0) |point (-0.121672 51.510871 11.0) |Europe +Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483) |point (-122.083843 37.386483 30.0) |point (-122.083843 37.386483 30.0) |Americas +Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321) |point (11.537505 48.146321 519.0) |point (11.537505 48.146321 519.0) |Europe +New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171) |point (-73.990027 40.745171 10.0) |point (-73.990027 40.745171 10.0) |Americas +Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538) |point 
(2.351773 48.845538 35.0) |point (2.351773 48.845538 35.0) |Europe +Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242) |point (-111.973505 33.376242 331.0)|point (-111.973505 33.376242 331.0)|Americas +San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541) |point (-122.394228 37.789541 16.0) |point (-122.394228 37.789541 16.0) |Americas +Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132) |point (127.060851 37.509132 38.0) |point (127.060851 37.509132 38.0) |Asia +Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868) |point (103.855535 1.295868 15.0) |point (103.855535 1.295868 15.0) |Asia +Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385) |point (151.208629 -33.863385 100.0)|point (151.208629 -33.863385 100.0)|Asia +Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616)|point (139.76402225 35.669616 40.0)|point (139.76402225 35.669616 40.0)|Asia ; // TODO: Both shape and location contain the same data for now, we should change it later to make things more interesting selectAllPointsAsWKT -SELECT city, ST_ASWKT(location) location_wkt, ST_ASWKT(shape) shape_wkt, region FROM "geo" ORDER BY "city"; +SELECT city, ST_ASWKT(location) location_wkt, ST_ASWKT(geoshape) geoshape_wkt, region FROM "geo" ORDER BY "city"; - city:s | location_wkt:s | shape_wkt:s | region:s + city:s | location_wkt:s | geoshape_wkt:s | region:s Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557 2.0) |Europe Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701 34.0) |Europe Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783 181.0) |Americas @@ -262,7 +263,7 @@ SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north, FLOOR(ST_X(location)/90) ea ; selectFilterByXOfLocation -SELECT city, ST_X(shape) x, ST_Y(shape) y, ST_Z(shape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; +SELECT city, ST_X(geoshape) x, ST_Y(geoshape) y, ST_Z(geoshape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; city:s | x:d | y:d | z:d | lx:d | ly:d Sydney |151.208629 |-33.863385 |100.0 |151.20862897485495|-33.863385021686554 diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json index 56007a0284c..8ef9e39991e 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json @@ -17,12 +17,15 @@ "type": "geo_point", "doc_values": "false" }, - "shape": { + "geoshape": { "type": "geo_shape" }, "region_point": { "type": "keyword" + }, + "shape": { + "type": "shape" } - } + } } } \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql index b8b8d4e36f4..c736b0b5f4f 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql @@ -4,6 +4,7 @@ CREATE TABLE "geo" ( "region" VARCHAR(50), "region_point" VARCHAR(50), "location" POINT, + "geoshape" GEOMETRY, "shape" GEOMETRY ) AS SELECT * FROM CSVREAD('classpath:/geo/geo.csv'); diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec index f1941161697..3f51c34d6ea 100644 --- 
a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec @@ -19,18 +19,18 @@ address | VARCHAR | text address.keyword | VARCHAR | keyword aliases | VARCHAR | text aliases.keyword | VARCHAR | keyword -boundary | GEOMETRY | geo_shape -centerline | GEOMETRY | geo_shape -centerlines | GEOMETRY | geo_shape +boundary | GEOMETRY | shape +centerline | GEOMETRY | shape +centerlines | GEOMETRY | shape fid | INTEGER | integer -footprint | GEOMETRY | geo_shape +footprint | GEOMETRY | shape name | VARCHAR | text name.keyword | VARCHAR | keyword -neatline | GEOMETRY | geo_shape +neatline | GEOMETRY | shape num_lanes | INTEGER | integer ogc_type | VARCHAR | keyword -position | GEOMETRY | geo_shape -shore | GEOMETRY | geo_shape -shores | GEOMETRY | geo_shape +position | GEOMETRY | shape +shore | GEOMETRY | shape +shores | GEOMETRY | shape type | VARCHAR | keyword ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec index 6d165c33433..b3c32d7eabd 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec @@ -7,9 +7,10 @@ SYS COLUMNS TABLE LIKE 'geo'; TABLE_CAT:s | TABLE_SCHEM:s| TABLE_NAME:s | COLUMN_NAME:s | DATA_TYPE:i | TYPE_NAME:s | COLUMN_SIZE:i|BUFFER_LENGTH:i|DECIMAL_DIGITS:i|NUM_PREC_RADIX:i| NULLABLE:i| REMARKS:s | COLUMN_DEF:s |SQL_DATA_TYPE:i|SQL_DATETIME_SUB:i|CHAR_OCTET_LENGTH:i|ORDINAL_POSITION:i|IS_NULLABLE:s|SCOPE_CATALOG:s|SCOPE_SCHEMA:s|SCOPE_TABLE:s|SOURCE_DATA_TYPE:sh|IS_AUTOINCREMENT:s|IS_GENERATEDCOLUMN:s integTest|null |geo |city |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |1 |YES |null |null |null |null |NO |NO -integTest|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO -integTest|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 |YES |null |null |null |null |NO |NO -integTest|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |4 |YES |null |null |null |null |NO |NO -integTest|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO -integTest|null |geo |shape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |6 |YES |null |null |null |null |NO |NO +integTest|null |geo |geoshape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO +integTest|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 |YES |null |null |null |null |NO |NO +integTest|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |4 |YES |null |null |null |null |NO |NO +integTest|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO +integTest|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |6 |YES |null |null |null |null |NO |NO +integTest|null |geo |shape |114 |SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |7 |YES |null |null |null |null |NO |NO ; \ No newline at end of file diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 5c4b89209fa..31636a30c68 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -69,6 +69,7 @@ import static org.elasticsearch.xpack.sql.stats.FeatureMetric.LOCAL; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.ORDERBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.sql.type.DataType.SHAPE; import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; /** @@ -77,7 +78,7 @@ import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; */ public final class Verifier { private final Metrics metrics; - + public Verifier(Metrics metrics) { this.metrics = metrics; } @@ -254,7 +255,7 @@ public final class Verifier { failures.addAll(localFailures); }); } - + // gather metrics if (failures.isEmpty()) { BitSet b = new BitSet(FeatureMetric.values().length); @@ -631,7 +632,7 @@ public final class Verifier { if (Functions.isAggregate(e)) { return true; } - + // left without leaves which have to match; if not there's a failure // make sure to match directly on the expression and not on the tree // (since otherwise exp might match the function argument which would be incorrect) @@ -644,7 +645,7 @@ public final class Verifier { } return false; } - + private static void checkGroupingFunctionInGroupBy(LogicalPlan p, Set localFailures) { // check if the query has a grouping function (Histogram) but no GROUP BY if (p instanceof Project) { @@ -734,14 +735,14 @@ public final class Verifier { fail(nested.get(0), "Grouping isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); nested.clear(); } - + // check in having p.forEachDown(f -> { if (f.child() instanceof Aggregate) { f.condition().forEachUp(match, FieldAttribute.class); } }, Filter.class); - + if (!nested.isEmpty()) { localFailures.add( fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); @@ -758,6 +759,9 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used for filtering")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used for filtering")); + } }, FieldAttribute.class); }, Filter.class); @@ -766,6 +770,9 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used in grouping")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used in grouping")); + } }, FieldAttribute.class)), Aggregate.class); @@ -774,6 +781,9 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used for sorting")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used for sorting")); + } }, FieldAttribute.class)), OrderBy.class); } @@ -831,7 +841,7 @@ public final class Verifier { } } }); - + }, Pivot.class); } -} \ No newline at end of file +} diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index d7609ebc8f9..9f61775cedc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -190,6 +190,13 @@ public class FieldHitExtractor implements HitExtractor { throw new SqlIllegalArgumentException("Cannot read geo_shape value [{}] (returned by [{}])", values, fieldName); } } + if (dataType == DataType.SHAPE) { + try { + return new GeoShape(values); + } catch (IOException ex) { + throw new SqlIllegalArgumentException("Cannot read shape value [{}] (returned by [{}])", values, fieldName); + } + } if (values instanceof Map) { throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName); } @@ -198,7 +205,7 @@ public class FieldHitExtractor implements HitExtractor { return DateUtils.asDateTime(Long.parseLong(values.toString()), zoneId); } } - + // The Jackson json parser can generate for numerics - Integers, Longs, BigIntegers (if Long is not enough) // and BigDecimal (if Double is not enough) if (values instanceof Number || values instanceof String || values instanceof Boolean) { @@ -266,7 +273,7 @@ public class FieldHitExtractor implements HitExtractor { for (int i = idx + 1; i < path.length; i++) { sj.add(path[i]); Object node = subMap.get(sj.toString()); - + if (node instanceof List) { List listOfValues = (List) node; // we can only do this optimization until the last element of our pass since geo points are using arrays @@ -281,7 +288,7 @@ public class FieldHitExtractor implements HitExtractor { return unwrapMultiValue(node); } } - + if (node instanceof Map) { if (i < path.length - 1) { // Add the sub-map to the queue along with the current path index @@ -318,7 +325,7 @@ public class FieldHitExtractor implements HitExtractor { public String fieldName() { return fieldName; } - + public String fullFieldName() { return fullFieldName; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java index 3006a08fad2..a43ffe745d2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java @@ -65,7 +65,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable { try { shape = parse(value); } catch (ParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } @@ -74,7 +74,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable { try { shape = parse(value); } catch (ParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 2ce3c1fac96..4a783edae58 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -59,6 +59,8 @@ public enum DataType { GEO_POINT( ExtTypes.GEOMETRY, Double.BYTES*2, Integer.MAX_VALUE, 25 * 2 + 8, false, false, false), // IP can be v4 or v6. The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456 // aka 39 chars + SHAPE( ExtTypes.GEOMETRY, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false), + // display size = 2 doubles + len("POINT( )") IP( "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true), // // INTERVALS @@ -254,7 +256,7 @@ public enum DataType { } public boolean isGeo() { - return this == GEO_POINT || this == GEO_SHAPE; + return this == GEO_POINT || this == GEO_SHAPE || this == SHAPE; } public boolean isDateBased() { @@ -268,7 +270,7 @@ public enum DataType { public boolean isDateOrTimeBased() { return isDateBased() || isTimeBased(); } - + // data type extract-able from _source or from docvalue_fields public boolean isFromDocValuesOnly() { return this == KEYWORD // because of ignore_above. Extracting this from _source wouldn't make sense if it wasn't indexed at all. @@ -276,13 +278,14 @@ public enum DataType { || this == DATETIME || this == SCALED_FLOAT // because of scaling_factor || this == GEO_POINT - || this == GEO_SHAPE; + || this == GEO_SHAPE + || this == SHAPE; } - + public static DataType fromOdbcType(String odbcType) { return ODBC_TO_ES.get(odbcType); } - + public static DataType fromSqlOrEsType(String typeName) { return SQL_TO_ES.get(typeName.toUpperCase(Locale.ROOT)); } @@ -305,7 +308,7 @@ public enum DataType { public String format() { return isDateOrTimeBased() ? 
DateUtils.DATE_PARSE_FORMAT : null; } - + /** * Returns the appropriate NumberType enum corresponding to this es type */ diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index b7404b8412a..2544a029260 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -396,7 +396,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase fe.extractFromSource(map)); assertThat(ex.getMessage(), is("Multiple values (returned by [a.b.c.d.e.f.g]) are not supported")); } - + public void testFieldsWithSingleValueArrayAsSubfield() { FieldHitExtractor fe = getFieldHitExtractor("a.b", false); Object value = randomNonNullValue(); @@ -405,7 +405,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase map = new HashMap<>(); @@ -414,7 +414,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase fe.extractFromSource(map)); assertThat(ex.getMessage(), is("Arrays (returned by [a.b]) are not supported")); } - + public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists() { FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); Object value = randomNonNullValue(); @@ -423,7 +423,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase map = new HashMap<>(); @@ -432,7 +432,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase fe.extractFromSource(map)); assertThat(ex.getMessage(), is("Arrays (returned by [a.b.c]) are not supported")); } - + public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists2() { FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false); Object value = randomNonNullValue(); @@ -462,7 +462,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase map = new HashMap<>(); map.put(fieldName, "POINT (1 2)"); assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); @@ -474,7 +474,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase map = new HashMap<>(); map.put(fieldName, "POINT (1 2)"); assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); @@ -487,7 +487,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase fe.extractFromSource(map2)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); - FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false, true); + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, + randomBoolean() ? 
DataType.GEO_SHAPE : DataType.SHAPE, UTC, false, true); assertEquals(new GeoShape(1, 2), lenientFe.extractFromSource(map2)); } @@ -605,7 +606,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase new BigDecimal("20012312345621343256123456254.20012312345621343256123456254"))); return value.get(); } - + private void assertFieldHitEquals(Object expected, Object actual) { if (expected instanceof BigDecimal) { // parsing will, by default, build a Double even if the initial value is BigDecimal diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java index 818897dce34..82a580d159c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -33,11 +33,11 @@ public class StWkttosqlProcessorTests extends ESTestCase { assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); - assertEquals("Cannot parse [point (foo bar)] as a geo_shape value", siae.getMessage()); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); - assertEquals("Cannot parse [point (10 10] as a geo_shape value", siae.getMessage()); + assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", siae.getMessage()); } public void testCoerce() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 6b7500cab66..adf1ad5b4d1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -49,7 +49,7 @@ public class SysTypesTests extends ESTestCase { "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", - "GEO_SHAPE", "GEO_POINT", "UNSUPPORTED", "OBJECT", "NESTED"); + "GEO_SHAPE", "GEO_POINT", "SHAPE", "UNSUPPORTED", "OBJECT", "NESTED"); cmd.execute(session(), wrap(p -> { SchemaRowSet r = (SchemaRowSet) p.rowSet(); @@ -62,7 +62,7 @@ public class SysTypesTests extends ESTestCase { assertFalse(r.column(10, Boolean.class)); // no auto-increment assertFalse(r.column(11, Boolean.class)); - + for (int i = 0; i < r.size(); i++) { assertEquals(names.get(i), r.column(0)); r.advanceRow(); From 9b4f37747472894538640381e2369dfa97598712 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 26 Sep 2019 10:31:25 -0700 Subject: [PATCH 84/94] Change Painless function node to use a block instead of raw statements (#46884) This change improves the node structure of SFunction. 
SFunction now uses an SBlock instead of a List of AStatements, reducing code duplication and giving a future target for symbol table scoping. --- .../elasticsearch/painless/antlr/Walker.java | 2 +- .../elasticsearch/painless/node/ELambda.java | 4 +- .../painless/node/ENewArrayFunctionRef.java | 7 ++- .../elasticsearch/painless/node/SBlock.java | 2 +- .../painless/node/SFunction.java | 48 ++++++------------- 5 files changed, 23 insertions(+), 40 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index f5ff7881fd9..ec56bf2845a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -272,7 +272,7 @@ public final class Walker extends PainlessParserBaseVisitor { statements.add((AStatement)visit(ctx.block().dstatement())); } - return new SFunction(location(ctx), rtnType, name, paramTypes, paramNames, statements, false); + return new SFunction(location(ctx), rtnType, name, paramTypes, paramNames, new SBlock(location(ctx), statements), false); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 139d7ce097f..bd247caf8d5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -175,8 +175,8 @@ public final class ELambda extends AExpression implements ILambda { // desugar lambda body into a synthetic method String name = locals.getNextSyntheticName(); - desugared = new SFunction( - location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); + desugared = new SFunction(location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, + new SBlock(location, statements), true); desugared.storeSettings(settings); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), desugared.name, returnType, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java index 68fa5b1aec0..39a4b9eadd0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java @@ -28,6 +28,7 @@ import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Type; import java.util.Arrays; +import java.util.Collections; import java.util.Objects; import java.util.Set; @@ -61,9 +62,11 @@ public final class ENewArrayFunctionRef extends AExpression implements ILambda { @Override void analyze(Locals locals) { - SReturn code = new SReturn(location, new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); + SReturn code = new SReturn(location, + new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); function = new SFunction(location, type, locals.getNextSyntheticName(), - Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true); + Collections.singletonList("int"), Collections.singletonList("size"),
+ new SBlock(location, Collections.singletonList(code)), true); function.storeSettings(settings); function.generateSignature(locals.getPainlessLookup()); function.extractVariables(null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index b00d2305d4b..8cfd32a2844 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -36,7 +36,7 @@ import static java.util.Collections.emptyList; */ public final class SBlock extends AStatement { - private final List statements; + final List statements; public SBlock(Location location, List statements) { super(location); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 4f8c68c4088..21f64f16352 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -50,7 +50,7 @@ public final class SFunction extends AStatement { public final String name; private final List paramTypeStrs; private final List paramNameStrs; - private final List statements; + private final SBlock block; public final boolean synthetic; private CompilerSettings settings; @@ -65,7 +65,7 @@ public final class SFunction extends AStatement { private Variable loop = null; public SFunction(Location location, String rtnType, String name, - List paramTypes, List paramNames, List statements, + List paramTypes, List paramNames, SBlock block, boolean synthetic) { super(location); @@ -73,27 +73,23 @@ public final class SFunction extends AStatement { this.name = Objects.requireNonNull(name); this.paramTypeStrs = Collections.unmodifiableList(paramTypes); this.paramNameStrs = Collections.unmodifiableList(paramNames); - this.statements = Collections.unmodifiableList(statements); + this.block = Objects.requireNonNull(block); this.synthetic = synthetic; } @Override void storeSettings(CompilerSettings settings) { - for (AStatement statement : statements) { - statement.storeSettings(settings); - } + block.storeSettings(settings); this.settings = settings; } @Override void extractVariables(Set variables) { - for (AStatement statement : statements) { - // we reset the list for function scope - // note this is not stored for this node - // but still required for lambdas - statement.extractVariables(new HashSet<>()); - } + // we reset the list for function scope + // note this is not stored for this node + // but still required for lambdas + block.extractVariables(new HashSet<>()); } void generateSignature(PainlessLookup painlessLookup) { @@ -131,28 +127,14 @@ public final class SFunction extends AStatement { @Override void analyze(Locals locals) { - if (statements == null || statements.isEmpty()) { + if (block.statements.isEmpty()) { throw createError(new IllegalArgumentException("Cannot generate an empty function [" + name + "].")); } locals = Locals.newLocalScope(locals); - - AStatement last = statements.get(statements.size() - 1); - - for (AStatement statement : statements) { - // Note that we do not need to check after the last statement because - // there is no statement that can be unreachable after the last. 
- if (allEscape) { - throw createError(new IllegalArgumentException("Unreachable statement.")); - } - - statement.lastSource = statement == last; - - statement.analyze(locals); - - methodEscape = statement.methodEscape; - allEscape = statement.allEscape; - } + block.lastSource = true; + block.analyze(locals); + methodEscape = block.methodEscape; if (!methodEscape && returnType != void.class) { throw createError(new IllegalArgumentException("Not all paths provide a return value for method [" + name + "].")); @@ -184,9 +166,7 @@ public final class SFunction extends AStatement { function.visitVarInsn(Opcodes.ISTORE, loop.getSlot()); } - for (AStatement statement : statements) { - statement.write(function, globals); - } + block.write(function, globals); if (!methodEscape) { if (returnType == void.class) { @@ -205,6 +185,6 @@ public final class SFunction extends AStatement { if (false == (paramTypeStrs.isEmpty() && paramNameStrs.isEmpty())) { description.add(joinWithName("Args", pairwiseToString(paramTypeStrs, paramNameStrs), emptyList())); } - return multilineToString(description, statements); + return multilineToString(description, block.statements); } } From b29a2cb360a07b768acbb704e885096f77d65bc4 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 26 Sep 2019 19:47:55 +0300 Subject: [PATCH 85/94] SQL: Check case where the pivot limit is reached (#47121) In some cases, the fetch size affects the way the groups are returned causing the last page to go beyond the limit. Add dedicated check to prevent extra data from being returned. Fix #47002 (cherry picked from commit f4c29646f097bbd29855300342823ef4cef61c05) --- .../xpack/sql/qa/jdbc/FetchSizeTestCase.java | 61 ++++++++++++++----- .../sql/qa/src/main/resources/pivot.csv-spec | 48 +++++++-------- .../sql/execution/search/PivotRowSet.java | 20 ++++-- 3 files changed, 83 insertions(+), 46 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java index f12f069a3b3..61cd6e93c18 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/FetchSizeTestCase.java @@ -156,20 +156,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase { * page size affects the result not the intermediate query. 
*/ public void testPivotPaging() throws Exception { - Request request = new Request("PUT", "/test_pivot/_bulk"); - request.addParameter("refresh", "true"); - StringBuilder bulk = new StringBuilder(); - String[] continent = new String[] { "AF", "AS", "EU", "NA", "SA", "AQ", "AU" }; - for (int i = 0; i <= 100; i++) { - bulk.append("{\"index\":{}}\n"); - bulk.append("{\"item\":").append(i % 10) - .append(", \"entry\":").append(i) - .append(", \"amount\" : ").append(randomInt(999)) - .append(", \"location\" : \"").append(continent[i % (continent.length)]).append("\"") - .append("}\n"); - } - request.setJsonEntity(bulk.toString()); - assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + addPivotData(); try (Connection c = esJdbc(); Statement s = c.createStatement()) { @@ -204,4 +191,50 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase { } assertNoSearchContexts(); } + + + public void testPivotPagingWithLimit() throws Exception { + addPivotData(); + + try (Connection c = esJdbc(); + Statement s = c.createStatement()) { + + // run a query with a limit that is not a multiple of the fetch size + String query = "SELECT * FROM " + + "(SELECT item, amount, location FROM test_pivot)" + + " PIVOT (AVG(amount) FOR location IN ( 'EU', 'NA' ) ) LIMIT 5"; + // set size smaller than an agg page + s.setFetchSize(20); + try (ResultSet rs = s.executeQuery(query)) { + assertEquals(3, rs.getMetaData().getColumnCount()); + for (int i = 0; i < 4; i++) { + assertTrue(rs.next()); + assertEquals(2, rs.getFetchSize()); + assertEquals(Long.valueOf(i), rs.getObject("item")); + } + // last entry + assertTrue(rs.next()); + assertEquals(1, rs.getFetchSize()); + assertFalse("LIMIT should be reached", rs.next()); + } + } + assertNoSearchContexts(); + } + + private void addPivotData() throws Exception { + Request request = new Request("PUT", "/test_pivot/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); + String[] continent = new String[] { "AF", "AS", "EU", "NA", "SA", "AQ", "AU" }; + for (int i = 0; i <= 100; i++) { + bulk.append("{\"index\":{}}\n"); + bulk.append("{\"item\":").append(i % 10) + .append(", \"entry\":").append(i) + .append(", \"amount\" : ").append(randomInt(999)) + .append(", \"location\" : \"").append(continent[i % (continent.length)]).append("\"") + .append("}\n"); + } + request.setJsonEntity(bulk.toString()); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec index ae761b6432e..0baa18765ff 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec @@ -114,18 +114,16 @@ null |48396.28571428572|62140.666666666664 1 |49767.22222222222|47073.25 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/47002 -// averageWithOneValueAndOrder -// schema::languages:bt|'F':d -// SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; -// -// languages | 'F' -// ---------------+------------------ -// 5 |46705.555555555555 -// 4 |49291.5 -// 3 |53660.0 -// 2 |50684.4 -// ; +averageWithOneValueAndOrder +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; + languages | 'F' 
+---------------+------------------ +5 |46705.555555555555 +4 |49291.5 +3 |53660.0 +2 |50684.4 +; averageWithTwoValuesAndOrderDesc schema::languages:bt|'M':d|'F':d @@ -165,20 +163,18 @@ null |48396.28571428572|62140.666666666664 5 |39052.875 |46705.555555555555 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/47002 -// sumWithoutSubquery -// schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i -// SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; -// -// birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 -// ---------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- -// null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null -// null |10043 |Yishay |M |1990-10-20 00:00:00.0|Tzvieli |34341 |null -// null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null -// 1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null -// 1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 -// 1953-01-23 00:00:00.0|10019 |Lillian |null |1999-04-30 00:00:00.0|Haddadi |73717 |null -// ; +sumWithoutSubquery +schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i +SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; + + birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 +---------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- +null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null +null |10043 |Yishay |M |1990-10-20 00:00:00.0|Tzvieli |34341 |null +null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null +1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null +1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 +; averageWithOneValueAndMath schema::languages:bt|'F':d diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java index 6839e7275ae..3d7e12b3d9b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java @@ -60,6 +60,10 @@ class PivotRowSet extends SchemaCompositeAggRowSet { currentRowGroupKey = key; // save the data data.add(currentRow); + + if (limit > 0 && data.size() == limit) { + break; + } // create a new row currentRow = new Object[columnCount()]; } @@ -76,19 +80,23 @@ class PivotRowSet extends SchemaCompositeAggRowSet { } } - // add the last group if any of the following matches: - // a. the last key has been sent before (it's the last page) - if ((previousLastKey != null && sameCompositeKey(previousLastKey, currentRowGroupKey))) { + // check the last group using the following: + // a. limit has been reached, the rest of the data is ignored. + if (limit > 0 && data.size() == limit) { + afterKey = null; + } + // b. the last key has been sent before (it's the last page) + else if ((previousLastKey != null && sameCompositeKey(previousLastKey, currentRowGroupKey))) { data.add(currentRow); afterKey = null; } - // b. all the values are initialized (there might be another page but no need to ask for the group again) - // c. 
or no data was added (typically because there's a null value such as the group) + // c. all the values are initialized (there might be another page but no need to ask for the group again) + // d. or no data was added (typically because there's a null value such as the group) else if (hasNull(currentRow) == false || data.isEmpty()) { data.add(currentRow); afterKey = currentRowGroupKey; } - //otherwise we can't tell whether it's complete or not + // otherwise we can't tell whether it's complete or not // so discard the last group and ask for it on the next page else { afterKey = lastCompletedGroupKey; From 4714f106502a1b1e0dbff084d83ebdc7bbb661a9 Mon Sep 17 00:00:00 2001 From: Nicole Albee <2642763+a03nikki@users.noreply.github.com> Date: Thu, 26 Sep 2019 14:48:54 -0500 Subject: [PATCH 86/94] [DOCS] Remove coming tag from 7.2.0 release highlights (#47183) --- docs/reference/release-notes/highlights-7.2.0.asciidoc | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/reference/release-notes/highlights-7.2.0.asciidoc b/docs/reference/release-notes/highlights-7.2.0.asciidoc index 5388f1c8ef9..b35cb045584 100644 --- a/docs/reference/release-notes/highlights-7.2.0.asciidoc +++ b/docs/reference/release-notes/highlights-7.2.0.asciidoc @@ -4,8 +4,6 @@ 7.2.0 ++++ -coming[7.2.0] - //NOTE: The notable-highlights tagged regions are re-used in the //Installation and Upgrade Guide From 9c60e15ebd02d4b6bc44881f26e0eb20ef0c0d98 Mon Sep 17 00:00:00 2001 From: Pius Date: Thu, 26 Sep 2019 13:14:42 -0700 Subject: [PATCH 87/94] [DOCS] Add 'remote clusters' requirement link to CCR docs (#47185) --- docs/reference/ccr/overview.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/ccr/overview.asciidoc b/docs/reference/ccr/overview.asciidoc index 8f25cb1da47..16f7c7d48df 100644 --- a/docs/reference/ccr/overview.asciidoc +++ b/docs/reference/ccr/overview.asciidoc @@ -17,6 +17,7 @@ Replication is pull-based. This means that replication is driven by the follower index. This simplifies state management on the leader index and means that {ccr} does not interfere with indexing on the leader index. +IMPORTANT: {ccr-cap} requires <<modules-remote-clusters,remote clusters>>. ==== Configuring replication @@ -213,4 +214,4 @@ following process starts again. You can terminate replication with the {ref}/ccr-post-unfollow.html[unfollow API]. This API converts a follower index -to a regular (non-follower) index. \ No newline at end of file +to a regular (non-follower) index. From 944421627d34f88e967ecd91520887a4923b0e3e Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 27 Sep 2019 09:03:15 +0300 Subject: [PATCH 88/94] Work around error building deb on Windows (#47011) Relates to #47007. The `gradle-ospackage-plugin` plugin doesn't properly support symlinks on Windows. This PR changes the way we configure tasks to prevent building these packages as part of a Windows check.
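In essence, the change wraps the package-verification tasks in an availability guard, so that on Windows (where neither dpkg nor rpm exists) the check tasks, and the package builds they depend on, are never configured. A minimal sketch of the pattern, reusing the build script's existing `dpkgExists`/`rpmExists` helpers and `buildDist` task; the full diff below applies the same guard around all of the check tasks:

    // Only wire up the package sanity checks when the native packaging
    // tooling is available on the build host. On Windows the guard is
    // false, so the deb/rpm packages are never built as part of a check
    // run (gradle-ospackage-plugin cannot handle their symlinks there).
    if (dpkgExists() || rpmExists()) {
        task checkExtraction(type: LoggedExec) {
            dependsOn buildDist
            // ... extraction and license checks, unchanged ...
        }
        check.dependsOn checkExtraction
    }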
--- distribution/packages/build.gradle | 291 +++++++++++++++-------------- 1 file changed, 148 insertions(+), 143 deletions(-) diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 136803a5d83..a1ab72890a4 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -407,163 +407,168 @@ subprojects { 'default' buildDist } - // sanity checks if packages can be extracted - final File extractionDir = new File(buildDir, 'extracted') - File packageExtractionDir - if (project.name.contains('deb')) { - packageExtractionDir = new File(extractionDir, 'deb-extracted') - } else { - assert project.name.contains('rpm') - packageExtractionDir = new File(extractionDir, 'rpm-extracted') - } - task checkExtraction(type: LoggedExec) { - dependsOn buildDist - doFirst { - project.delete(extractionDir) - extractionDir.mkdirs() - } - } - check.dependsOn checkExtraction - if (project.name.contains('deb')) { - checkExtraction { - onlyIf dpkgExists - commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir - } - } else { - assert project.name.contains('rpm') - checkExtraction { - onlyIf rpmExists - final File rpmDatabase = new File(extractionDir, 'rpm-database') - commandLine 'rpm', - '--badreloc', - '--nodeps', - '--noscripts', - '--notriggers', - '--dbpath', - rpmDatabase, - '--relocate', - "/=${packageExtractionDir}", - '-i', - "${-> buildDist.outputs.files.singleFile}" - } - } + if (dpkgExists() || rpmExists()) { - task checkLicense { - dependsOn buildDist, checkExtraction - } - check.dependsOn checkLicense - if (project.name.contains('deb')) { - checkLicense { - onlyIf dpkgExists - doLast { - Path copyrightPath - String expectedLicense - String licenseFilename - if (project.name.contains('oss-')) { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright") - expectedLicense = "ASL-2.0" - licenseFilename = "APACHE-LICENSE-2.0.txt" - } else { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") - expectedLicense = "Elastic-License" - licenseFilename = "ELASTIC-LICENSE.txt" - } - final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", - "Copyright: Elasticsearch B.V. 
", - "License: " + expectedLicense) - final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) - final List expectedLines = header + licenseLines.collect { " " + it } - assertLinesInFile(copyrightPath, expectedLines) + // sanity checks if packages can be extracted + final File extractionDir = new File(buildDir, 'extracted') + File packageExtractionDir + if (project.name.contains('deb')) { + packageExtractionDir = new File(extractionDir, 'deb-extracted') + } else { + assert project.name.contains('rpm') + packageExtractionDir = new File(extractionDir, 'rpm-extracted') + } + task checkExtraction(type: LoggedExec) { + dependsOn buildDist + doFirst { + project.delete(extractionDir) + extractionDir.mkdirs() } } - } else { - assert project.name.contains('rpm') - checkLicense { - onlyIf rpmExists - doLast { - String licenseFilename - if (project.name.contains('oss-')) { - licenseFilename = "APACHE-LICENSE-2.0.txt" - } else { - licenseFilename = "ELASTIC-LICENSE.txt" - } - final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) - final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") - assertLinesInFile(licensePath, licenseLines) + + check.dependsOn checkExtraction + if (project.name.contains('deb')) { + checkExtraction { + onlyIf dpkgExists + commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir + } + } else { + assert project.name.contains('rpm') + checkExtraction { + onlyIf rpmExists + final File rpmDatabase = new File(extractionDir, 'rpm-database') + commandLine 'rpm', + '--badreloc', + '--nodeps', + '--noscripts', + '--notriggers', + '--dbpath', + rpmDatabase, + '--relocate', + "/=${packageExtractionDir}", + '-i', + "${-> buildDist.outputs.files.singleFile}" } } - } - task checkNotice { - dependsOn buildDist, checkExtraction - onlyIf { (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) } - doLast { - final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") - final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") - assertLinesInFile(noticePath, noticeLines) + task checkLicense { + dependsOn buildDist, checkExtraction } - } - check.dependsOn checkNotice - - task checkLicenseMetadata(type: LoggedExec) { - dependsOn buildDist, checkExtraction - } - check.dependsOn checkLicenseMetadata - if (project.name.contains('deb')) { - checkLicenseMetadata { LoggedExec exec -> - onlyIf dpkgExists - final ByteArrayOutputStream output = new ByteArrayOutputStream() - exec.commandLine 'dpkg-deb', '--info', "${ -> buildDist.outputs.files.filter(debFilter).singleFile}" - exec.standardOutput = output - doLast { - String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL-2.0" - } else { - expectedLicense = "Elastic-License" + check.dependsOn checkLicense + if (project.name.contains('deb')) { + checkLicense { + onlyIf dpkgExists + doLast { + Path copyrightPath + String expectedLicense + String licenseFilename + if (project.name.contains('oss-')) { + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright") + expectedLicense = "ASL-2.0" + licenseFilename = "APACHE-LICENSE-2.0.txt" + } else { + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") + expectedLicense = 
"Elastic-License" + licenseFilename = "ELASTIC-LICENSE.txt" + } + final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", + "Copyright: Elasticsearch B.V. ", + "License: " + expectedLicense) + final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) + final List expectedLines = header + licenseLines.collect { " " + it } + assertLinesInFile(copyrightPath, expectedLines) } - final Pattern pattern = Pattern.compile("\\s*License: (.+)") - final String info = output.toString('UTF-8') - final String[] actualLines = info.split("\n") - int count = 0 - for (final String actualLine : actualLines) { - final Matcher matcher = pattern.matcher(actualLine) - if (matcher.matches()) { - count++ - final String actualLicense = matcher.group(1) - if (expectedLicense != actualLicense) { - throw new GradleException("expected license [${expectedLicense} for package info but found [${actualLicense}]") + } + } else { + assert project.name.contains('rpm') + checkLicense { + onlyIf rpmExists + doLast { + String licenseFilename + if (project.name.contains('oss-')) { + licenseFilename = "APACHE-LICENSE-2.0.txt" + } else { + licenseFilename = "ELASTIC-LICENSE.txt" + } + final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) + final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") + assertLinesInFile(licensePath, licenseLines) + } + } + } + + task checkNotice { + dependsOn buildDist, checkExtraction + onlyIf { + (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) + } + doLast { + final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") + final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") + assertLinesInFile(noticePath, noticeLines) + } + } + check.dependsOn checkNotice + + task checkLicenseMetadata(type: LoggedExec) { + dependsOn buildDist, checkExtraction + } + check.dependsOn checkLicenseMetadata + if (project.name.contains('deb')) { + checkLicenseMetadata { LoggedExec exec -> + onlyIf dpkgExists + final ByteArrayOutputStream output = new ByteArrayOutputStream() + exec.commandLine 'dpkg-deb', '--info', "${-> buildDist.outputs.files.filter(debFilter).singleFile}" + exec.standardOutput = output + doLast { + String expectedLicense + if (project.name.contains('oss-')) { + expectedLicense = "ASL-2.0" + } else { + expectedLicense = "Elastic-License" + } + final Pattern pattern = Pattern.compile("\\s*License: (.+)") + final String info = output.toString('UTF-8') + final String[] actualLines = info.split("\n") + int count = 0 + for (final String actualLine : actualLines) { + final Matcher matcher = pattern.matcher(actualLine) + if (matcher.matches()) { + count++ + final String actualLicense = matcher.group(1) + if (expectedLicense != actualLicense) { + throw new GradleException("expected license [${expectedLicense} for package info but found [${actualLicense}]") + } } } - } - if (count == 0) { - throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}") - } - if (count > 1) { - throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}") + if (count == 0) { + throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}") + } + if (count > 1) { + 
throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}") + } } } - } else { - assert project.name.contains('rpm') - checkLicenseMetadata { LoggedExec exec -> - onlyIf rpmExists - final ByteArrayOutputStream output = new ByteArrayOutputStream() - exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}" - exec.standardOutput = output - doLast { - String license = output.toString('UTF-8') - String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL 2.0" - } else { - expectedLicense = "Elastic License" - } - if (license != expectedLicense) { - throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]") + } else { + assert project.name.contains('rpm') + checkLicenseMetadata { LoggedExec exec -> + onlyIf rpmExists + final ByteArrayOutputStream output = new ByteArrayOutputStream() + exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}" + exec.standardOutput = output + doLast { + String license = output.toString('UTF-8') + String expectedLicense + if (project.name.contains('oss-')) { + expectedLicense = "ASL 2.0" + } else { + expectedLicense = "Elastic License" + } + if (license != expectedLicense) { + throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]") + } } } } } - } From 813b130e08ce0a6f08368e4ec831981f8dbb3ecf Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 27 Sep 2019 10:34:51 +0300 Subject: [PATCH 89/94] Exclude the demo folder from the JDK (#47161) The folder contains jars with source code that fail the lintian test on Debian-based distributions.
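The fix itself is a one-line exclusion in the copySpec that stages the bundled JDK into the distribution image. A minimal sketch of the relevant fragment (`project.jdks` and `platform` come from the surrounding build script; see the diff below for the exact context):

    // Drop the JDK's demo/ tree when copying the bundled JDK: its jars
    // embed source code, which makes `lintian --fail-on-warnings`
    // reject the resulting deb package.
    return copySpec {
        from project.jdks."bundled_${platform}"
        exclude "demo/**"
        // ... per-file mode tweaks for bin/ and jspawnhelper, unchanged ...
    }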
--- distribution/build.gradle | 1 + .../packaging/test/DebMetadataTests.java | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 3b5777fc71b..8026f596bfa 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -395,6 +395,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } return copySpec { from project.jdks."bundled_${platform}" + exclude "demo/**" eachFile { FileCopyDetails details -> if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { details.mode = 0755 diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index d484e92a302..0a291a9c40d 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.FileUtils; import org.elasticsearch.packaging.util.Shell; import org.junit.Before; -import org.junit.Ignore; import java.util.regex.Pattern; @@ -38,7 +37,6 @@ public class DebMetadataTests extends PackagingTestCase { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); } - @Ignore public void test05CheckLintian() { sh.run("lintian --fail-on-warnings " + FileUtils.getDistributionFile(distribution())); } From 42ae76ab7ca711fbb6adc2c4a9a01f37aa21fa2e Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 27 Sep 2019 09:30:00 +0200 Subject: [PATCH 90/94] Injected response errors in Azure repository tests should have a body (#47176) The Azure SDK client expects server errors to have a body, something that looks like: <?xml version="1.0" encoding="utf-8"?> <Error> <Code>string-value</Code> <Message>string-value</Message> </Error> I forgot to add such errors in Azure tests, and that triggers some NPE in the client like the one reported in #47120.
Closes #47120 --- .../azure/AzureBlobContainerRetriesTests.java | 16 ++-- .../azure/AzureBlobStoreRepositoryTests.java | 13 +++- .../repositories/azure/TestUtils.java | 74 +++++++++++++++++++ ...ESMockAPIBasedRepositoryIntegTestCase.java | 2 +- 4 files changed, 91 insertions(+), 14 deletions(-) create mode 100644 plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java index c5fe0c5e72f..0aa7a3b0922 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -24,7 +24,6 @@ import com.microsoft.azure.storage.RetryPolicyFactory; import com.microsoft.azure.storage.blob.BlobRequestOptions; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpServer; -import org.apache.http.HttpStatus; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; @@ -164,7 +163,7 @@ public class AzureBlobContainerRetriesTests extends ESTestCase { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(bytes.length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); - exchange.sendResponseHeaders(HttpStatus.SC_OK, -1); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); exchange.close(); return; } @@ -176,15 +175,14 @@ public class AzureBlobContainerRetriesTests extends ESTestCase { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); - exchange.sendResponseHeaders(HttpStatus.SC_OK, length); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(bytes, rangeStart, length); exchange.close(); return; } } if (randomBoolean()) { - exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, - HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); } exchange.close(); }); @@ -209,7 +207,7 @@ public class AzureBlobContainerRetriesTests extends ESTestCase { if (Objects.deepEquals(bytes, BytesReference.toBytes(body))) { exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); } else { - exchange.sendResponseHeaders(HttpStatus.SC_BAD_REQUEST, -1); + TestUtils.sendError(exchange, RestStatus.BAD_REQUEST); } exchange.close(); return; @@ -220,8 +218,7 @@ public class AzureBlobContainerRetriesTests extends ESTestCase { Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, Math.max(1, bytes.length - 1))]); } else { Streams.readFully(exchange.getRequestBody()); - exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, - HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, 
RestStatus.SERVICE_UNAVAILABLE)); } } exchange.close(); @@ -283,8 +280,7 @@ public class AzureBlobContainerRetriesTests extends ESTestCase { if (randomBoolean()) { Streams.readFully(exchange.getRequestBody()); - exchange.sendResponseHeaders(randomFrom(HttpStatus.SC_INTERNAL_SERVER_ERROR, HttpStatus.SC_BAD_GATEWAY, - HttpStatus.SC_SERVICE_UNAVAILABLE, HttpStatus.SC_GATEWAY_TIMEOUT), -1); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); } exchange.close(); }); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 2f12b1c61ff..28993bd475a 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -171,7 +171,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg } else if (Regex.simpleMatch("HEAD /container/*", request)) { final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); if (blob == null) { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.NOT_FOUND); return; } exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(blob.length())); @@ -181,7 +181,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg } else if (Regex.simpleMatch("GET /container/*", request)) { final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); if (blob == null) { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.NOT_FOUND); return; } @@ -228,7 +228,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg exchange.getResponseBody().write(response); } else { - exchange.sendResponseHeaders(RestStatus.BAD_REQUEST.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.BAD_REQUEST); } } finally { exchange.close(); @@ -249,6 +249,13 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg super(delegate, maxErrorsPerRequest); } + @Override + protected void handleAsError(final HttpExchange exchange) throws IOException { + Streams.readFully(exchange.getRequestBody()); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + exchange.close(); + } + @Override protected String requestUniqueId(final HttpExchange exchange) { final String requestId = exchange.getRequestHeaders().getFirst(Constants.HeaderConstants.CLIENT_REQUEST_ID_HEADER); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java new file mode 100644 index 00000000000..cdb64ecbcf5 --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.Constants; +import com.microsoft.azure.storage.StorageErrorCodeStrings; +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpExchange; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +final class TestUtils { + + private TestUtils() {} + + @SuppressForbidden(reason = "use HttpExchange and Headers") + static void sendError(final HttpExchange exchange, final RestStatus status) throws IOException { + final Headers headers = exchange.getResponseHeaders(); + headers.add("Content-Type", "application/xml"); + + final String requestId = exchange.getRequestHeaders().getFirst(Constants.HeaderConstants.CLIENT_REQUEST_ID_HEADER); + if (requestId != null) { + headers.add(Constants.HeaderConstants.REQUEST_ID_HEADER, requestId); + } + + final String errorCode = toAzureErrorCode(status); + if (errorCode != null) { + headers.add(Constants.HeaderConstants.ERROR_CODE, errorCode); + } + + if (errorCode == null || "HEAD".equals(exchange.getRequestMethod())) { + exchange.sendResponseHeaders(status.getStatus(), -1L); + } else { + final byte[] response = ("<?xml version=\"1.0\" encoding=\"UTF-8\"?><Error><Code>" + errorCode + "</Code><Message>" + + status + "</Message></Error>").getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(status.getStatus(), response.length); + exchange.getResponseBody().write(response); + } + } + + // See https://docs.microsoft.com/en-us/rest/api/storageservices/common-rest-api-error-codes + private static String toAzureErrorCode(final RestStatus status) { + assert status.getStatus() >= 400; + switch (status) { + case BAD_REQUEST: + return StorageErrorCodeStrings.INVALID_METADATA; + case NOT_FOUND: + return StorageErrorCodeStrings.BLOB_NOT_FOUND; + case INTERNAL_SERVER_ERROR: + return StorageErrorCodeStrings.INTERNAL_ERROR; + default: + return null; + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 21880d13683..e47bdeee3c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -165,7 +165,7 @@ public abstract class ESMockAPIBasedRepositoryIntegTestCase extends ESBlobStoreR } } - private void handleAsError(final HttpExchange exchange) throws IOException { + protected void handleAsError(final HttpExchange exchange) throws IOException { Streams.readFully(exchange.getRequestBody()); exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); exchange.close(); From 2cd6bed9c8a09831ab2afdd0abb784348c9b90b8 Mon Sep 17 00:00:00 2001 From: Colin
Goodheart-Smithe Date: Fri, 27 Sep 2019 09:18:59 +0100 Subject: [PATCH 91/94] Updates 7.4.0 release notes --- docs/reference/release-notes/7.4.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/release-notes/7.4.asciidoc b/docs/reference/release-notes/7.4.asciidoc index 9714eb582fd..e6974aaf7bb 100644 --- a/docs/reference/release-notes/7.4.asciidoc +++ b/docs/reference/release-notes/7.4.asciidoc @@ -329,6 +329,7 @@ CRUD:: * Do not allow version in Rest Update API {pull}43516[#43516] (issue: {issue}42497[#42497]) Cluster Coordination:: +* Assert no exceptions during state application {pull}47090[#47090] (issue: {issue}47038[#47038]) * Avoid counting votes from master-ineligible nodes {pull}43688[#43688] Data Frame:: From 0ab7132c4741ab8d275cb87cd684744265a6e642 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Fri, 27 Sep 2019 10:34:30 +0200 Subject: [PATCH 92/94] [DOCS] Reformats Profile API (#47168) * [DOCS] Reformats Profile API. * [DOCS] Fixes failing docs test. --- docs/reference/search/profile.asciidoc | 326 +++++++++++++++---------- 1 file changed, 194 insertions(+), 132 deletions(-) diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 0d83d293052..0b959f87e0e 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1,20 +1,30 @@ [[search-profile]] === Profile API -WARNING: The Profile API is a debugging tool and adds significant overhead to search execution. +WARNING: The Profile API is a debugging tool and adds significant overhead to search execution. -The Profile API provides detailed timing information about the execution of individual components -in a search request. It gives the user insight into how search requests are executed at a low level so that -the user can understand why certain requests are slow, and take steps to improve them. -Note that the Profile API, <>, doesn't measure -network latency, time spent in the search fetch phase, time spent while the requests spends -in queues or while merging shard responses on the coordinating node. +Provides detailed timing information about the execution of individual +components in a search request. -The output from the Profile API is *very* verbose, especially for complicated requests executed across -many shards. Pretty-printing the response is recommended to help understand the output -[float] -==== Usage +[[search-profile-api-desc]] +==== {api-description-title} + +The Profile API gives the user insight into how search requests are executed at +a low level so that the user can understand why certain requests are slow, and +take steps to improve them. Note that the Profile API, +<>, doesn't measure network latency, +time spent in the search fetch phase, time spent while the requests spends in +queues or while merging shard responses on the coordinating node. + +The output from the Profile API is *very* verbose, especially for complicated +requests executed across many shards. Pretty-printing the response is +recommended to help understand the output. + + +[[search-profile-api-example]] +==== {api-examples-title} + Any `_search` request can be profiled by adding a top-level `profile` parameter: @@ -31,9 +41,10 @@ GET /twitter/_search // TEST[setup:twitter] <1> Setting the top-level `profile` parameter to `true` will enable profiling -for the search +for the search. 
-This will yield the following result: + +The API returns the following result: [source,console-result] -------------------------------------------------- @@ -167,12 +178,13 @@ This will yield the following result: // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/\[2aE02wS1R8q_QFnYu6vDVQ\]\[twitter\]\[0\]/$body.$_path/] -<1> Search results are returned, but were omitted here for brevity +<1> Search results are returned, but were omitted here for brevity. -Even for a simple query, the response is relatively complicated. Let's break it down piece-by-piece before moving -to more complex examples. +Even for a simple query, the response is relatively complicated. Let's break it +down piece-by-piece before moving to more complex examples. -First, the overall structure of the profile response is as follows: + +The overall structure of the profile response is as follows: [source,console-result] -------------------------------------------------- @@ -200,27 +212,37 @@ First, the overall structure of the profile response is as follows: // TESTRESPONSE[s/"query": \[...\]/"query": $body.$_path/] // TESTRESPONSE[s/"collector": \[...\]/"collector": $body.$_path/] // TESTRESPONSE[s/"aggregations": \[...\]/"aggregations": []/] -<1> A profile is returned for each shard that participated in the response, and is identified -by a unique ID -<2> Each profile contains a section which holds details about the query execution -<3> Each profile has a single time representing the cumulative rewrite time -<4> Each profile also contains a section about the Lucene Collectors which run the search -<5> Each profile contains a section which holds the details about the aggregation execution +<1> A profile is returned for each shard that participated in the response, and +is identified by a unique ID. +<2> Each profile contains a section which holds details about the query +execution. +<3> Each profile has a single time representing the cumulative rewrite time. +<4> Each profile also contains a section about the Lucene Collectors which run +the search. +<5> Each profile contains a section which holds the details about the +aggregation execution. -Because a search request may be executed against one or more shards in an index, and a search may cover -one or more indices, the top level element in the profile response is an array of `shard` objects. -Each shard object lists its `id` which uniquely identifies the shard. The ID's format is +Because a search request may be executed against one or more shards in an index, +and a search may cover one or more indices, the top level element in the profile +response is an array of `shard` objects. Each shard object lists its `id` which +uniquely identifies the shard. The ID's format is `[nodeID][indexName][shardID]`. -The profile itself may consist of one or more "searches", where a search is a query executed against the underlying -Lucene index. Most search requests submitted by the user will only execute a single `search` against the Lucene index. -But occasionally multiple searches will be executed, such as including a global aggregation (which needs to execute -a secondary "match_all" query for the global context). +The profile itself may consist of one or more "searches", where a search is a +query executed against the underlying Lucene index. Most search requests +submitted by the user will only execute a single `search` against the Lucene +index. 
But occasionally multiple searches will be executed, such as including a +global aggregation (which needs to execute a secondary "match_all" query for the +global context). Inside each `search` object there will be two arrays of profiled information: -a `query` array and a `collector` array. Alongside the `search` object is an `aggregations` object that contains the profile information for the aggregations. In the future, more sections may be added, such as `suggest`, `highlight`, etc. +a `query` array and a `collector` array. Alongside the `search` object is an +`aggregations` object that contains the profile information for the +aggregations. In the future, more sections may be added, such as `suggest`, +`highlight`, etc. -There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds). +There will also be a `rewrite` metric showing the total time spent rewriting the +query (in nanoseconds). NOTE: As with other statistics apis, the Profile API supports human readable outputs. This can be turned on by adding `?human=true` to the query string. In this case, the output contains the additional `time` field containing rounded, @@ -245,10 +267,11 @@ the `advance` phase of that query is the cause, for example. [[query-section]] ===== `query` Section -The `query` section contains detailed timing of the query tree executed by Lucene on a particular shard. -The overall structure of this query tree will resemble your original Elasticsearch query, but may be slightly -(or sometimes very) different. It will also use similar but not always identical naming. Using our previous -`match` query example, let's analyze the `query` section: +The `query` section contains detailed timing of the query tree executed by +Lucene on a particular shard. The overall structure of this query tree will +resemble your original Elasticsearch query, but may be slightly (or sometimes +very) different. It will also use similar but not always identical naming. +Using our previous `match` query example, let's analyze the `query` section: [source,console-result] -------------------------------------------------- @@ -279,25 +302,30 @@ The overall structure of this query tree will resemble your original Elasticsear // TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"breakdown": \{...\}/"breakdown": $body.$_path/] -<1> The breakdown timings are omitted for simplicity +<1> The breakdown timings are omitted for simplicity. -Based on the profile structure, we can see that our `match` query was rewritten by Lucene into a BooleanQuery with two -clauses (both holding a TermQuery). The `type` field displays the Lucene class name, and often aligns with -the equivalent name in Elasticsearch. The `description` field displays the Lucene explanation text for the query, and -is made available to help differentiating between parts of your query (e.g. both `message:search` and `message:test` -are TermQuery's and would appear identical otherwise. +Based on the profile structure, we can see that our `match` query was rewritten +by Lucene into a BooleanQuery with two clauses (both holding a TermQuery). The +`type` field displays the Lucene class name, and often aligns with the +equivalent name in Elasticsearch. The `description` field displays the Lucene +explanation text for the query, and is made available to help differentiating +between parts of your query (e.g. 
both `message:search` and `message:test` are +TermQuery's and would appear identical otherwise. -The `time_in_nanos` field shows that this query took ~1.8ms for the entire BooleanQuery to execute. The recorded time is inclusive -of all children. +The `time_in_nanos` field shows that this query took ~1.8ms for the entire +BooleanQuery to execute. The recorded time is inclusive of all children. -The `breakdown` field will give detailed stats about how the time was spent, we'll look at -that in a moment. Finally, the `children` array lists any sub-queries that may be present. Because we searched for two -values ("search test"), our BooleanQuery holds two children TermQueries. They have identical information (type, time, -breakdown, etc). Children are allowed to have their own children. +The `breakdown` field will give detailed stats about how the time was spent, +we'll look at that in a moment. Finally, the `children` array lists any +sub-queries that may be present. Because we searched for two values ("search +test"), our BooleanQuery holds two children TermQueries. They have identical +information (type, time, breakdown, etc). Children are allowed to have their +own children. -====== Timing Breakdown +===== Timing Breakdown -The `breakdown` component lists detailed timing statistics about low-level Lucene execution: +The `breakdown` component lists detailed timing statistics about low-level +Lucene execution: [source,console-result] -------------------------------------------------- @@ -326,10 +354,12 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen // TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] -Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall -`time_in_nanos` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is -actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, -the breakdown is inclusive of all children times. +Timings are listed in wall-clock nanoseconds and are not normalized at all. All +caveats about the overall `time_in_nanos` apply here. The intention of the +breakdown is to give you a feel for A) what machinery in Lucene is actually +eating time, and B) the magnitude of differences in times between the various +components. Like the overall time, the breakdown is inclusive of all children +times. The meaning of the stats are as follows: @@ -399,13 +429,15 @@ The meaning of the stats are as follows: means the `nextDoc()` method was called on two different documents. This can be used to help judge how selective queries are, by comparing counts between different query components. + [[collectors-section]] ===== `collectors` Section -The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector" -which is responsible for coordinating the traversal, scoring, and collection of matching documents. Collectors -are also how a single query can record aggregation results, execute unscoped "global" queries, execute post-query -filters, etc. +The Collectors portion of the response shows high-level execution details. +Lucene works by defining a "Collector" which is responsible for coordinating the +traversal, scoring, and collection of matching documents. 
Collectors are also +how a single query can record aggregation results, execute unscoped "global" +queries, execute post-query filters, etc. Looking at the previous example: @@ -430,15 +462,20 @@ Looking at the previous example: // TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] -We see a single collector named `SimpleTopScoreDocCollector` wrapped into `CancellableCollector`. `SimpleTopScoreDocCollector` is the default "scoring and sorting" -`Collector` used by Elasticsearch. The `reason` field attempts to give a plain English description of the class name. The -`time_in_nanos` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists -all sub-collectors. The `CancellableCollector` that wraps `SimpleTopScoreDocCollector` is used by Elasticsearch to detect if the current -search was cancelled and stop collecting documents as soon as it occurs. -It should be noted that Collector times are **independent** from the Query times. They are calculated, combined, -and normalized independently! Due to the nature of Lucene's execution, it is impossible to "merge" the times -from the Collectors into the Query section, so they are displayed in separate portions. +We see a single collector named `SimpleTopScoreDocCollector` wrapped into +`CancellableCollector`. `SimpleTopScoreDocCollector` is the default "scoring and +sorting" `Collector` used by {es}. The `reason` field attempts to give a plain +English description of the class name. The `time_in_nanos` is similar to the +time in the Query tree: a wall-clock time inclusive of all children. Similarly, +`children` lists all sub-collectors. The `CancellableCollector` that wraps +`SimpleTopScoreDocCollector` is used by {es} to detect if the current search was +cancelled and stop collecting documents as soon as it occurs. + +It should be noted that Collector times are **independent** from the Query +times. They are calculated, combined, and normalized independently! Due to the +nature of Lucene's execution, it is impossible to "merge" the times from the +Collectors into the Query section, so they are displayed in separate portions. For reference, the various collector reasons are: @@ -489,20 +526,22 @@ For reference, the various collector reasons are: [[rewrite-section]] ===== `rewrite` Section -All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or -more times, and the process continues until the query stops changing. This process allows Lucene to perform -optimizations, such as removing redundant clauses, replacing one query for a more efficient execution path, -etc. For example a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans -are unnecessary in this case. +All queries in Lucene undergo a "rewriting" process. A query (and its +sub-queries) may be rewritten one or more times, and the process continues until +the query stops changing. This process allows Lucene to perform optimizations, +such as removing redundant clauses, replacing one query for a more efficient +execution path, etc. For example a Boolean -> Boolean -> TermQuery can be +rewritten to a TermQuery, because all the Booleans are unnecessary in this case. -The rewriting process is complex and difficult to display, since queries can change drastically. Rather than -showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). 
This -value is cumulative and contains the total time for all queries being rewritten. +The rewriting process is complex and difficult to display, since queries can +change drastically. Rather than showing the intermediate results, the total +rewrite time is simply displayed as a value (in nanoseconds). This value is +cumulative and contains the total time for all queries being rewritten. ===== A more complex example - -To demonstrate a slightly more complex query and the associated results, we can profile the following query: +To demonstrate a slightly more complex query and the associated results, we can +profile the following query: [source,console] -------------------------------------------------- @@ -550,7 +589,8 @@ This example has: - A global aggregation - A post_filter -And the response: + +The API returns the following result: [source,console-result] -------------------------------------------------- @@ -660,47 +700,58 @@ And the response: // TESTRESPONSE[s/\.\.\.//] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[test\]\[0\]"/"id": $body.profile.shards.0.id/] -<1> The `"aggregations"` portion has been omitted because it will be covered in the next section +<1> The `"aggregations"` portion has been omitted because it will be covered in +the next section. -As you can see, the output is significantly more verbose than before. All the major portions of the query are -represented: +As you can see, the output is significantly more verbose than before. All the +major portions of the query are represented: -1. The first `TermQuery` (user:test) represents the main `term` query -2. The second `TermQuery` (message:some) represents the `post_filter` query +1. The first `TermQuery` (user:test) represents the main `term` query. +2. The second `TermQuery` (message:some) represents the `post_filter` query. -The Collector tree is fairly straightforward, showing how a single CancellableCollector wraps a MultiCollector - which also wraps a FilteredCollector to execute the post_filter (and in turn wraps the normal scoring SimpleCollector), - a BucketCollector to run all scoped aggregations. +The Collector tree is fairly straightforward, showing how a single +CancellableCollector wraps a MultiCollector which also wraps a FilteredCollector +to execute the post_filter (and in turn wraps the normal scoring +SimpleCollector), a BucketCollector to run all scoped aggregations. ===== Understanding MultiTermQuery output -A special note needs to be made about the `MultiTermQuery` class of queries. This includes wildcards, regex, and fuzzy -queries. These queries emit very verbose responses, and are not overly structured. +A special note needs to be made about the `MultiTermQuery` class of queries. +This includes wildcards, regex, and fuzzy queries. These queries emit very +verbose responses, and are not overly structured. -Essentially, these queries rewrite themselves on a per-segment basis. If you imagine the wildcard query `b*`, it technically -can match any token that begins with the letter "b". It would be impossible to enumerate all possible combinations, -so Lucene rewrites the query in context of the segment being evaluated, e.g., one segment may contain the tokens -`[bar, baz]`, so the query rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may only have the -token `[bakery]`, so the query rewrites to a single TermQuery for "bakery". +Essentially, these queries rewrite themselves on a per-segment basis. 
If you +imagine the wildcard query `b*`, it technically can match any token that begins +with the letter "b". It would be impossible to enumerate all possible +combinations, so Lucene rewrites the query in context of the segment being +evaluated, e.g., one segment may contain the tokens `[bar, baz]`, so the query +rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may +only have the token `[bakery]`, so the query rewrites to a single TermQuery for +"bakery". -Due to this dynamic, per-segment rewriting, the clean tree structure becomes distorted and no longer follows a clean -"lineage" showing how one query rewrites into the next. At present time, all we can do is apologize, and suggest you -collapse the details for that query's children if it is too confusing. Luckily, all the timing statistics are correct, -just not the physical layout in the response, so it is sufficient to just analyze the top-level MultiTermQuery and +Due to this dynamic, per-segment rewriting, the clean tree structure becomes +distorted and no longer follows a clean "lineage" showing how one query rewrites +into the next. At present time, all we can do is apologize, and suggest you +collapse the details for that query's children if it is too confusing. Luckily, +all the timing statistics are correct, just not the physical layout in the +response, so it is sufficient to just analyze the top-level MultiTermQuery and ignore its children if you find the details too tricky to interpret. -Hopefully this will be fixed in future iterations, but it is a tricky problem to solve and still in-progress :) +Hopefully this will be fixed in future iterations, but it is a tricky problem to +solve and still in-progress. :) [[profiling-aggregations]] -==== Profiling Aggregations +===== Profiling Aggregations + [[agg-section]] -===== `aggregations` Section +====== `aggregations` Section -The `aggregations` section contains detailed timing of the aggregation tree executed by a particular shard. -The overall structure of this aggregation tree will resemble your original Elasticsearch request. Let's -execute the previous query again and look at the aggregation profile this time: +The `aggregations` section contains detailed timing of the aggregation tree +executed by a particular shard. The overall structure of this aggregation tree +will resemble your original {es} request. Let's execute the previous query again +and look at the aggregation profile this time: [source,console] -------------------------------------------------- @@ -741,6 +792,7 @@ GET /twitter/_search // TEST[s/_search/_search\?filter_path=profile.shards.aggregations/] // TEST[continued] + This yields the following aggregation profile output: [source,console-result] @@ -807,16 +859,20 @@ This yields the following aggregation profile output: // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[test\]\[0\]"/"id": $body.profile.shards.0.id/] -From the profile structure we can see that the `my_scoped_agg` is internally being run as a `LongTermsAggregator` (because the field it is -aggregating, `likes`, is a numeric field). At the same level, we see a `GlobalAggregator` which comes from `my_global_agg`. That -aggregation then has a child `LongTermsAggregator` which comes from the second term's aggregation on `likes`. +From the profile structure we can see that the `my_scoped_agg` is internally +being run as a `LongTermsAggregator` (because the field it is aggregating, +`likes`, is a numeric field). 
At the same level, we see a `GlobalAggregator` +which comes from `my_global_agg`. That aggregation then has a child +`LongTermsAggregator` which comes from the second term's aggregation on `likes`. -The `time_in_nanos` field shows the time executed by each aggregation, and is inclusive of all children. While the overall time is useful, -the `breakdown` field will give detailed stats about how the time was spent. +The `time_in_nanos` field shows the time executed by each aggregation, and is +inclusive of all children. While the overall time is useful, the `breakdown` +field will give detailed stats about how the time was spent. -====== Timing Breakdown +===== Timing Breakdown -The `breakdown` component lists detailed timing statistics about low-level Lucene execution: +The `breakdown` component lists detailed timing statistics about low-level +Lucene execution: [source,js] -------------------------------------------------- @@ -834,10 +890,11 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen -------------------------------------------------- // NOTCONSOLE -Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall -`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Elasticsearch is -actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, -the breakdown is inclusive of all children times. +Timings are listed in wall-clock nanoseconds and are not normalized at all. All +caveats about the overall `time` apply here. The intention of the breakdown is +to give you a feel for A) what machinery in {es} is actually eating time, and B) +the magnitude of differences in times between the various components. Like the +overall time, the breakdown is inclusive of all children times. The meaning of the stats are as follows: @@ -866,28 +923,33 @@ The meaning of the stats are as follows: means the `collect()` method was called on two different documents. [[profiling-considerations]] -==== Profiling Considerations +===== Profiling Considerations -===== Performance Notes +Like any profiler, the Profile API introduces a non-negligible overhead to +search execution. The act of instrumenting low-level method calls such as +`collect`, `advance`, and `next_doc` can be fairly expensive, since these +methods are called in tight loops. Therefore, profiling should not be enabled +in production settings by default, and should not be compared against +non-profiled query times. Profiling is just a diagnostic tool. -Like any profiler, the Profile API introduces a non-negligible overhead to search execution. The act of instrumenting -low-level method calls such as `collect`, `advance`, and `next_doc` can be fairly expensive, since these methods are called -in tight loops. Therefore, profiling should not be enabled in production settings by default, and should not -be compared against non-profiled query times. Profiling is just a diagnostic tool. - -There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This -could cause some queries to report larger relative times than their non-profiled counterparts, but in general should -not have a drastic effect compared to other components in the profiled query. +There are also cases where special Lucene optimizations are disabled, since they +are not amenable to profiling. 
This could cause some queries to report larger +relative times than their non-profiled counterparts, but in general should not +have a drastic effect compared to other components in the profiled query. [[profile-limitations]] ===== Limitations -- Profiling currently does not measure the search fetch phase nor the network overhead -- Profiling also does not account for time spent in the queue, merging shard responses on the coordinating node, or -additional work such as building global ordinals (an internal data structure used to speed up search) -- Profiling statistics are currently not available for suggestions, highlighting, `dfs_query_then_fetch` -- Profiling of the reduce phase of aggregation is currently not available -- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were -never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed -diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures, or -other bugs, please report them! +- Profiling currently does not measure the search fetch phase nor the network +overhead. +- Profiling also does not account for time spent in the queue, merging shard +responses on the coordinating node, or additional work such as building global +ordinals (an internal data structure used to speed up search). +- Profiling statistics are currently not available for suggestions, +highlighting, `dfs_query_then_fetch`. +- Profiling of the reduce phase of aggregation is currently not available. +- The Profiler is still highly experimental. The Profiler is instrumenting parts +of Lucene that were never designed to be exposed in this manner, and so all +results should be viewed as a best effort to provide detailed diagnostics. We +hope to improve this over time. If you find obviously wrong numbers, strange +query structures, or other bugs, please report them! From a1e2e208ce5cad4829e7ac160e95d995f31c5870 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Fri, 27 Sep 2019 12:05:41 +0200 Subject: [PATCH 93/94] Mute Snapshot/Restore with repository-azure (#47204) Relates #47201 --- .../rest-api-spec/test/repository_azure/10_repository.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml index 92866190959..735915adaa6 100644 --- a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml +++ b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml @@ -15,6 +15,9 @@ setup: --- "Snapshot/Restore with repository-azure": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/47201" # Get repository - do: From 3fbd58d156c420bfbf3e1070b314ac0f465e6c14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 27 Sep 2019 13:01:51 +0200 Subject: [PATCH 94/94] [7.x] Allow evaluation to consist of multiple steps. 
(#46653) (#47194)

---
 .../ml/action/EvaluateDataFrameAction.java    | 19 ++++-
 .../ml/dataframe/evaluation/Evaluation.java   | 57 ++++++++++++--
 .../evaluation/EvaluationMetric.java          | 28 +++++++
 .../evaluation/EvaluationMetricResult.java    |  4 +-
 .../regression/MeanSquaredError.java          | 28 ++++---
 .../evaluation/regression/RSquared.java       | 26 ++++---
 .../evaluation/regression/Regression.java     | 43 +++++------
 .../regression/RegressionMetric.java          | 19 ++---
 .../AbstractConfusionMatrixMetric.java        | 27 ++++++-
 .../evaluation/softclassification/AucRoc.java | 36 ++++++---
 .../BinarySoftClassification.java             | 52 ++++++-------
 .../softclassification/ConfusionMatrix.java   |  4 +-
 .../softclassification/Precision.java         |  2 +-
 .../evaluation/softclassification/Recall.java |  4 +-
 .../ScoreByThresholdResult.java               |  2 +-
 .../SoftClassificationMetric.java             | 19 ++---
 .../regression/MeanSquaredErrorTests.java     |  7 +-
 .../evaluation/regression/RSquaredTests.java  | 36 ++++++---
 .../regression/RegressionTests.java           | 20 ++---
 .../BinarySoftClassificationTests.java        | 10 ++-
 .../TransportEvaluateDataFrameAction.java     | 75 ++++++++++++++----
 21 files changed, 342 insertions(+), 176 deletions(-)
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java
index b3b2a3b6666..7f848622392 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java
@@ -105,28 +105,31 @@ public class EvaluateDataFrameAction extends ActionType<EvaluateDataFrameAction.Response>
-        public final void setIndices(List<String> indices) {
+        public final Request setIndices(List<String> indices) {
             ExceptionsHelper.requireNonNull(indices, INDEX);
             if (indices.isEmpty()) {
                 throw ExceptionsHelper.badRequestException("At least one index must be specified");
             }
             this.indices = indices.toArray(new String[indices.size()]);
+            return this;
         }
 
         public QueryBuilder getParsedQuery() {
             return Optional.ofNullable(queryProvider).orElseGet(QueryProvider::defaultQuery).getParsedQuery();
         }
 
-        public final void setQueryProvider(QueryProvider queryProvider) {
+        public final Request setQueryProvider(QueryProvider queryProvider) {
             this.queryProvider = queryProvider;
+            return this;
         }
 
         public Evaluation getEvaluation() {
             return evaluation;
         }
 
-        public final void setEvaluation(Evaluation evaluation) {
+        public final Request setEvaluation(Evaluation evaluation) {
             this.evaluation = ExceptionsHelper.requireNonNull(evaluation, EVALUATION);
+            return this;
         }
 
         @Override
@@ -203,6 +206,14 @@ public class EvaluateDataFrameAction extends ActionType<EvaluateDataFrameAction.Response>
+        public List<EvaluationMetricResult> getMetrics() {
+            return metrics;
+        }
+
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(evaluationName);
@@ -214,7 +225,7 @@ public class EvaluateDataFrameAction extends ActionType
+    List<? extends EvaluationMetric> getMetrics();
 
     /**
-     * Computes the evaluation result
-     * @param searchResponse The search response required to compute the result
-     * @param listener A listener of the results
+     * Builds the search required to collect data to compute the evaluation result
+     * @param userProvidedQueryBuilder User-provided query that must be respected when collecting data
      */
-    void evaluate(SearchResponse searchResponse, ActionListener<List<EvaluationMetricResult>> listener);
+    SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder);
+
+    /**
+     * Builds the search that verifies existence of required fields and applies user-provided query
+     * @param requiredFields fields that must exist
+     * @param userProvidedQueryBuilder user-provided query
+     */
+    default SearchSourceBuilder newSearchSourceBuilder(List<String> requiredFields, QueryBuilder userProvidedQueryBuilder) {
+        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
+        for (String requiredField : requiredFields) {
+            boolQuery.filter(QueryBuilders.existsQuery(requiredField));
+        }
+        boolQuery.filter(userProvidedQueryBuilder);
+        return new SearchSourceBuilder().size(0).query(boolQuery);
+    }
+
+    /**
+     * Processes {@link SearchResponse} from the search action
+     * @param searchResponse response from the search action
+     */
+    void process(SearchResponse searchResponse);
+
+    /**
+     * @return true iff all the metrics have their results computed
+     */
+    default boolean hasAllResults() {
+        return getMetrics().stream().map(EvaluationMetric::getResult).allMatch(Optional::isPresent);
+    }
+
+    /**
+     * Returns the list of evaluation results
+     * @return list of evaluation results
+     */
+    default List<EvaluationMetricResult> getResults() {
+        return getMetrics().stream()
+            .map(EvaluationMetric::getResult)
+            .filter(Optional::isPresent)
+            .map(Optional::get)
+            .collect(Collectors.toList());
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java
new file mode 100644
index 00000000000..54934b64652
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.dataframe.evaluation;
+
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+
+import java.util.Optional;
+
+/**
+ * {@link EvaluationMetric} class represents a metric to evaluate.
+ */
+public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
+
+    /**
+     * Returns the name of the metric (which may differ to the writeable name)
+     */
+    String getName();
+
+    /**
+     * Gets the evaluation result for this metric.
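+     * <p>An illustrative driver loop (a sketch only; {@code client}, {@code indices} and
+     * {@code userQuery} are hypothetical placeholders, not part of this interface): the
+     * caller re-runs the search built by the owning {@code Evaluation} until every metric
+     * can supply a result, then collects them.
+     * <pre>{@code
+     * while (evaluation.hasAllResults() == false) {
+     *     SearchSourceBuilder searchSource = evaluation.buildSearch(userQuery);
+     *     SearchResponse response = client.search(new SearchRequest(indices).source(searchSource)).actionGet();
+     *     evaluation.process(response);
+     * }
+     * List<EvaluationMetricResult> results = evaluation.getResults();
+     * }</pre>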
+     * @return {@code Optional.empty()} if the result is not available yet, {@code Optional.of(result)} otherwise
+     */
+    Optional<EvaluationMetricResult> getResult();
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java
index 36b8adf9d4e..06c7719a401 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java
@@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
 public interface EvaluationMetricResult extends ToXContentObject, NamedWriteable {
 
     /**
-     * Returns the name of the metric
+     * Returns the name of the metric (which may differ to the writeable name)
      */
-    String getName();
+    String getMetricName();
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java
index e48cb46b5c0..dc8de45f7bc 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java
@@ -20,10 +20,12 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
 
 import java.io.IOException;
 import java.text.MessageFormat;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
  * Calculates the mean squared error between two known numerical fields.
@@ -48,28 +50,34 @@ public class MeanSquaredError implements RegressionMetric {
         return PARSER.apply(parser, null);
     }
 
-    public MeanSquaredError(StreamInput in) {
+    private EvaluationMetricResult result;
 
-    }
+    public MeanSquaredError(StreamInput in) {}
 
-    public MeanSquaredError() {
-
-    }
+    public MeanSquaredError() {}
 
     @Override
-    public String getMetricName() {
+    public String getName() {
         return NAME.getPreferredName();
     }
 
     @Override
     public List<AggregationBuilder> aggs(String actualField, String predictedField) {
-        return Collections.singletonList(AggregationBuilders.avg(AGG_NAME).script(new Script(buildScript(actualField, predictedField))));
+        if (result != null) {
+            return Collections.emptyList();
+        }
+        return Arrays.asList(AggregationBuilders.avg(AGG_NAME).script(new Script(buildScript(actualField, predictedField))));
     }
 
     @Override
-    public EvaluationMetricResult evaluate(Aggregations aggs) {
+    public void process(Aggregations aggs) {
         NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME);
-        return value == null ? new Result(0.0) : new Result(value.value());
+        result = value == null ? new Result(0.0) : new Result(value.value());
+    }
+
+    @Override
+    public Optional<EvaluationMetricResult> getResult() {
+        return Optional.ofNullable(result);
     }
 
     @Override
@@ -121,7 +129,7 @@ public class MeanSquaredError implements RegressionMetric {
     }
 
     @Override
-    public String getName() {
+    public String getMetricName() {
         return NAME.getPreferredName();
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java
index a5530656183..9307d5ae0ae 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java
@@ -23,9 +23,11 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
 
 import java.io.IOException;
 import java.text.MessageFormat;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
  * Calculates R-Squared between two known numerical fields.
@@ -53,36 +55,42 @@ public class RSquared implements RegressionMetric {
         return PARSER.apply(parser, null);
     }
 
-    public RSquared(StreamInput in) {
+    private EvaluationMetricResult result;
 
-    }
+    public RSquared(StreamInput in) {}
 
-    public RSquared() {
-
-    }
+    public RSquared() {}
 
     @Override
-    public String getMetricName() {
+    public String getName() {
         return NAME.getPreferredName();
     }
 
     @Override
     public List<AggregationBuilder> aggs(String actualField, String predictedField) {
+        if (result != null) {
+            return Collections.emptyList();
+        }
         return Arrays.asList(
             AggregationBuilders.sum(SS_RES).script(new Script(buildScript(actualField, predictedField))),
             AggregationBuilders.extendedStats(ExtendedStatsAggregationBuilder.NAME + "_actual").field(actualField));
     }
 
     @Override
-    public EvaluationMetricResult evaluate(Aggregations aggs) {
+    public void process(Aggregations aggs) {
         NumericMetricsAggregation.SingleValue residualSumOfSquares = aggs.get(SS_RES);
         ExtendedStats extendedStats = aggs.get(ExtendedStatsAggregationBuilder.NAME + "_actual");
         // extendedStats.getVariance() is the statistical sumOfSquares divided by count
-        return residualSumOfSquares == null || extendedStats == null || extendedStats.getCount() == 0 ?
+        result = residualSumOfSquares == null || extendedStats == null || extendedStats.getCount() == 0 ?
             new Result(0.0) :
             new Result(1 - (residualSumOfSquares.value() / (extendedStats.getVariance() * extendedStats.getCount())));
     }
 
+    @Override
+    public Optional<EvaluationMetricResult> getResult() {
+        return Optional.ofNullable(result);
+    }
+
     @Override
     public String getWriteableName() {
         return NAME.getPreferredName();
     }
@@ -132,7 +140,7 @@ public class RSquared implements RegressionMetric {
     }
 
     @Override
-    public String getName() {
+    public String getMetricName() {
         return NAME.getPreferredName();
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java
index bb2540a8691..4741a033ae5 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java
@@ -5,7 +5,6 @@
  */
 package org.elasticsearch.xpack.core.ml.dataframe.evaluation.regression;
 
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
@@ -14,17 +13,15 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation;
-import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
@@ -86,19 +83,16 @@ public class Regression implements Evaluation {
     }
 
     private static List<RegressionMetric> initMetrics(@Nullable List<RegressionMetric> parsedMetrics) {
-        List<RegressionMetric> metrics = parsedMetrics == null ? defaultMetrics() : parsedMetrics;
+        List<RegressionMetric> metrics = parsedMetrics == null ? defaultMetrics() : new ArrayList<>(parsedMetrics);
         if (metrics.isEmpty()) {
             throw ExceptionsHelper.badRequestException("[{}] must have one or more metrics", NAME.getPreferredName());
         }
-        Collections.sort(metrics, Comparator.comparing(RegressionMetric::getMetricName));
+        Collections.sort(metrics, Comparator.comparing(RegressionMetric::getName));
         return metrics;
     }
 
     private static List<RegressionMetric> defaultMetrics() {
-        List<RegressionMetric> defaultMetrics = new ArrayList<>(2);
-        defaultMetrics.add(new MeanSquaredError());
-        defaultMetrics.add(new RSquared());
-        return defaultMetrics;
+        return Arrays.asList(new MeanSquaredError(), new RSquared());
    }
 
     @Override
@@ -107,12 +101,15 @@
     }
 
     @Override
-    public SearchSourceBuilder buildSearch(QueryBuilder queryBuilder) {
-        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
-            .filter(QueryBuilders.existsQuery(actualField))
-            .filter(QueryBuilders.existsQuery(predictedField))
-            .filter(queryBuilder);
-        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).query(boolQuery);
+    public List<RegressionMetric> getMetrics() {
+        return metrics;
+    }
+
+    @Override
+    public SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder) {
+        ExceptionsHelper.requireNonNull(userProvidedQueryBuilder, "userProvidedQueryBuilder");
+        SearchSourceBuilder searchSourceBuilder =
+            newSearchSourceBuilder(Arrays.asList(actualField, predictedField), userProvidedQueryBuilder);
         for (RegressionMetric metric : metrics) {
             List<AggregationBuilder> aggs = metric.aggs(actualField, predictedField);
             aggs.forEach(searchSourceBuilder::aggregation);
@@ -121,18 +118,14 @@
     }
 
     @Override
-    public void evaluate(SearchResponse searchResponse, ActionListener<List<EvaluationMetricResult>> listener) {
-        List<EvaluationMetricResult> results = new ArrayList<>(metrics.size());
+    public void process(SearchResponse searchResponse) {
+        ExceptionsHelper.requireNonNull(searchResponse, "searchResponse");
         if (searchResponse.getHits().getTotalHits().value == 0) {
-            listener.onFailure(ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields",
-                actualField,
-                predictedField));
-            return;
+            throw ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields", actualField, predictedField);
         }
         for (RegressionMetric metric : metrics) {
-            results.add(metric.evaluate(searchResponse.getAggregations()));
+            metric.process(searchResponse.getAggregations());
         }
-        listener.onResponse(results);
     }
 
     @Override
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java
index 1da48e2f305..08dfbfab4aa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java
@@ -5,20 +5,14 @@
  */
 package org.elasticsearch.xpack.core.ml.dataframe.evaluation.regression;
 
-import org.elasticsearch.common.io.stream.NamedWriteable;
-import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
+import
org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetric; import java.util.List; -public interface RegressionMetric extends ToXContentObject, NamedWriteable { - - /** - * Returns the name of the metric (which may differ to the writeable name) - */ - String getMetricName(); +public interface RegressionMetric extends EvaluationMetric { /** * Builds the aggregation that collect required data to compute the metric @@ -29,9 +23,8 @@ public interface RegressionMetric extends ToXContentObject, NamedWriteable { List aggs(String actualField, String predictedField); /** - * Calculates the metric result - * @param aggs the aggregations - * @return the metric result + * Processes given aggregations as a step towards computing result + * @param aggs aggregations from {@link SearchResponse} */ - EvaluationMetricResult evaluate(Aggregations aggs); + void process(Aggregations aggs); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java index facdcceea19..45faec8512d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java @@ -13,27 +13,31 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Optional; abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric { public static final ParseField AT = new ParseField("at"); protected final double[] thresholds; + private EvaluationMetricResult result; protected AbstractConfusionMatrixMetric(double[] thresholds) { this.thresholds = ExceptionsHelper.requireNonNull(thresholds, AT); if (thresholds.length == 0) { - throw ExceptionsHelper.badRequestException("[" + getMetricName() + "." + AT.getPreferredName() - + "] must have at least one value"); + throw ExceptionsHelper.badRequestException("[" + getName() + "." + AT.getPreferredName() + "] must have at least one value"); } for (double threshold : thresholds) { if (threshold < 0 || threshold > 1.0) { - throw ExceptionsHelper.badRequestException("[" + getMetricName() + "." + AT.getPreferredName() + throw ExceptionsHelper.badRequestException("[" + getName() + "." 
+ AT.getPreferredName() + "] values must be in [0.0, 1.0]"); } } @@ -58,6 +62,9 @@ abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric @Override public final List aggs(String actualField, List classInfos) { + if (result != null) { + return Collections.emptyList(); + } List aggs = new ArrayList<>(); for (double threshold : thresholds) { aggs.addAll(aggsAt(actualField, classInfos, threshold)); @@ -65,14 +72,26 @@ abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric return aggs; } + @Override + public void process(ClassInfo classInfo, Aggregations aggs) { + result = evaluate(classInfo, aggs); + } + + @Override + public Optional getResult() { + return Optional.ofNullable(result); + } + protected abstract List aggsAt(String labelField, List classInfos, double threshold); + protected abstract EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs); + protected enum Condition { TP, FP, TN, FN; } protected String aggName(ClassInfo classInfo, double threshold, Condition condition) { - return getMetricName() + "_" + classInfo.getName() + "_at_" + threshold + "_" + condition.name(); + return getName() + "_" + classInfo.getName() + "_at_" + threshold + "_" + condition.name(); } protected AggregationBuilder buildAgg(ClassInfo classInfo, double threshold, Condition condition) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java index 228dac00bfb..7f126b1ec2d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java @@ -30,6 +30,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.IntStream; /** @@ -70,6 +71,7 @@ public class AucRoc implements SoftClassificationMetric { } private final boolean includeCurve; + private EvaluationMetricResult result; public AucRoc(Boolean includeCurve) { this.includeCurve = includeCurve == null ? 
false : includeCurve; @@ -98,7 +100,7 @@ public class AucRoc implements SoftClassificationMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @@ -117,6 +119,9 @@ public class AucRoc implements SoftClassificationMetric { @Override public List aggs(String actualField, List classInfos) { + if (result != null) { + return Collections.emptyList(); + } double[] percentiles = IntStream.range(1, 100).mapToDouble(v -> (double) v).toArray(); List aggs = new ArrayList<>(); for (ClassInfo classInfo : classInfos) { @@ -134,22 +139,31 @@ public class AucRoc implements SoftClassificationMetric { return aggs; } - private String evaluatedLabelAggName(ClassInfo classInfo) { - return getMetricName() + "_" + classInfo.getName(); - } - - private String restLabelsAggName(ClassInfo classInfo) { - return getMetricName() + "_non_" + classInfo.getName(); + @Override + public void process(ClassInfo classInfo, Aggregations aggs) { + result = evaluate(classInfo, aggs); } @Override - public EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs) { + public Optional getResult() { + return Optional.ofNullable(result); + } + + private String evaluatedLabelAggName(ClassInfo classInfo) { + return getName() + "_" + classInfo.getName(); + } + + private String restLabelsAggName(ClassInfo classInfo) { + return getName() + "_non_" + classInfo.getName(); + } + + private EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs) { Filter classAgg = aggs.get(evaluatedLabelAggName(classInfo)); Filter restAgg = aggs.get(restLabelsAggName(classInfo)); double[] tpPercentiles = percentilesArray(classAgg.getAggregations().get(PERCENTILES), - "[" + getMetricName() + "] requires at least one actual_field to have the value [" + classInfo.getName() + "]"); + "[" + getName() + "] requires at least one actual_field to have the value [" + classInfo.getName() + "]"); double[] fpPercentiles = percentilesArray(restAgg.getAggregations().get(PERCENTILES), - "[" + getMetricName() + "] requires at least one actual_field to have a different value than [" + classInfo.getName() + "]"); + "[" + getName() + "] requires at least one actual_field to have a different value than [" + classInfo.getName() + "]"); List aucRocCurve = buildAucRocCurve(tpPercentiles, fpPercentiles); double aucRocScore = calculateAucScore(aucRocCurve); return new Result(aucRocScore, includeCurve ? 
aucRocCurve : Collections.emptyList()); @@ -326,7 +340,7 @@ public class AucRoc implements SoftClassificationMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java index 20731eba5e8..386919edec8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -14,18 +13,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; @@ -87,17 +82,16 @@ public class BinarySoftClassification implements Evaluation { if (metrics.isEmpty()) { throw ExceptionsHelper.badRequestException("[{}] must have one or more metrics", NAME.getPreferredName()); } - Collections.sort(metrics, Comparator.comparing(SoftClassificationMetric::getMetricName)); + Collections.sort(metrics, Comparator.comparing(SoftClassificationMetric::getName)); return metrics; } private static List defaultMetrics() { - List defaultMetrics = new ArrayList<>(4); - defaultMetrics.add(new AucRoc(false)); - defaultMetrics.add(new Precision(Arrays.asList(0.25, 0.5, 0.75))); - defaultMetrics.add(new Recall(Arrays.asList(0.25, 0.5, 0.75))); - defaultMetrics.add(new ConfusionMatrix(Arrays.asList(0.25, 0.5, 0.75))); - return defaultMetrics; + return Arrays.asList( + new AucRoc(false), + new Precision(Arrays.asList(0.25, 0.5, 0.75)), + new Recall(Arrays.asList(0.25, 0.5, 0.75)), + new ConfusionMatrix(Arrays.asList(0.25, 0.5, 0.75))); } public BinarySoftClassification(StreamInput in) throws IOException { @@ -126,7 +120,7 @@ public class BinarySoftClassification implements Evaluation { builder.startObject(METRICS.getPreferredName()); for (SoftClassificationMetric metric : metrics) { - builder.field(metric.getMetricName(), metric); + builder.field(metric.getName(), metric); } builder.endObject(); @@ -155,34 +149,34 @@ public class BinarySoftClassification implements Evaluation { } @Override - public SearchSourceBuilder 
buildSearch(QueryBuilder queryBuilder) { - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() - .filter(QueryBuilders.existsQuery(actualField)) - .filter(QueryBuilders.existsQuery(predictedProbabilityField)) - .filter(queryBuilder); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).query(boolQuery); + public List getMetrics() { + return metrics; + } + + @Override + public SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder) { + ExceptionsHelper.requireNonNull(userProvidedQueryBuilder, "userProvidedQueryBuilder"); + SearchSourceBuilder searchSourceBuilder = + newSearchSourceBuilder(Arrays.asList(actualField, predictedProbabilityField), userProvidedQueryBuilder); + BinaryClassInfo binaryClassInfo = new BinaryClassInfo(); for (SoftClassificationMetric metric : metrics) { - List aggs = metric.aggs(actualField, Collections.singletonList(new BinaryClassInfo())); + List aggs = metric.aggs(actualField, Collections.singletonList(binaryClassInfo)); aggs.forEach(searchSourceBuilder::aggregation); } return searchSourceBuilder; } @Override - public void evaluate(SearchResponse searchResponse, ActionListener> listener) { + public void process(SearchResponse searchResponse) { + ExceptionsHelper.requireNonNull(searchResponse, "searchResponse"); if (searchResponse.getHits().getTotalHits().value == 0) { - listener.onFailure(ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields", actualField, - predictedProbabilityField)); - return; + throw ExceptionsHelper.badRequestException( + "No documents found containing both [{}, {}] fields", actualField, predictedProbabilityField); } - - List results = new ArrayList<>(); - Aggregations aggs = searchResponse.getAggregations(); BinaryClassInfo binaryClassInfo = new BinaryClassInfo(); for (SoftClassificationMetric metric : metrics) { - results.add(metric.evaluate(binaryClassInfo, aggs)); + metric.process(binaryClassInfo, searchResponse.getAggregations()); } - listener.onResponse(results); } private class BinaryClassInfo implements SoftClassificationMetric.ClassInfo { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java index 54f245962d5..6fc05809245 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java @@ -50,7 +50,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @@ -132,7 +132,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java index d38a52bb203..a0fcda5f90c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java +++ 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java
index 54f245962d5..6fc05809245 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java
@@ -50,7 +50,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric {
     }
 
     @Override
-    public String getMetricName() {
+    public String getName() {
         return NAME.getPreferredName();
     }
 
@@ -132,7 +132,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric {
     }
 
     @Override
-    public String getName() {
+    public String getMetricName() {
         return NAME.getPreferredName();
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java
index d38a52bb203..a0fcda5f90c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java
@@ -48,7 +48,7 @@ public class Precision extends AbstractConfusionMatrixMetric {
     }
 
     @Override
-    public String getMetricName() {
+    public String getName() {
         return NAME.getPreferredName();
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java
index f7103aceeda..53b3f1a24a2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java
@@ -48,7 +48,7 @@ public class Recall extends AbstractConfusionMatrixMetric {
     }
 
     @Override
-    public String getMetricName() {
+    public String getName() {
         return NAME.getPreferredName();
     }
 
@@ -68,7 +68,7 @@ public class Recall extends AbstractConfusionMatrixMetric {
     @Override
     protected List<AggregationBuilder> aggsAt(String actualField, List<ClassInfo> classInfos, double threshold) {
         List<AggregationBuilder> aggs = new ArrayList<>();
-        for (ClassInfo classInfo: classInfos) {
+        for (ClassInfo classInfo : classInfos) {
             aggs.add(buildAgg(classInfo, threshold, Condition.TP));
             aggs.add(buildAgg(classInfo, threshold, Condition.FN));
         }
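Recall only needs true-positive and false-negative counts at each threshold, which is why aggsAt emits exactly one filter aggregation per condition (Condition.TP and Condition.FN). A minimal sketch of what one such aggregation might look like, assuming a boolean actual field and a numeric probability field; the helper name, aggregation name, and query shape are assumptions, not taken from this patch (the real buildAgg works off a ClassInfo):

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;

// Hypothetical sketch of a per-threshold confusion-matrix cell.
final class ThresholdAggSketch {

    static AggregationBuilder truePositivesAt(String actualField, String probabilityField, double threshold) {
        // A true positive at this threshold: the document actually belongs to the class
        // AND its predicted probability is at or above the threshold.
        return AggregationBuilders.filter(
            "recall_at_" + threshold + "_tp",
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery(actualField, true))
                .filter(QueryBuilders.rangeQuery(probabilityField).gte(threshold)));
    }
}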
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java
index bd6b6e7db25..0ad99a83cf2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java
@@ -40,7 +40,7 @@ public class ScoreByThresholdResult implements EvaluationMetricResult {
     }
 
     @Override
-    public String getName() {
+    public String getMetricName() {
         return name;
     }
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java
index dfb256e9b52..a5b072632c2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java
@@ -5,16 +5,15 @@
  */
 package org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification;
 
-import org.elasticsearch.common.io.stream.NamedWriteable;
-import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
+import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetric;
 
 import java.util.List;
 
-public interface SoftClassificationMetric extends ToXContentObject, NamedWriteable {
+public interface SoftClassificationMetric extends EvaluationMetric {
 
     /**
      * The information of a specific class
@@ -37,11 +36,6 @@ public interface SoftClassificationMetric extends ToXContentObject, NamedWriteab
         String getProbabilityField();
     }
 
-    /**
-     * Returns the name of the metric (which may differ to the writeable name)
-     */
-    String getMetricName();
-
     /**
      * Builds the aggregation that collect required data to compute the metric
      * @param actualField the field that stores the actual class
@@ -51,10 +45,9 @@ public interface SoftClassificationMetric extends ToXContentObject, NamedWriteab
     List<AggregationBuilder> aggs(String actualField, List<ClassInfo> classInfos);
 
     /**
-     * Calculates the metric result for a given class
+     * Processes the given aggregations as a step towards computing the result
      * @param classInfo the class to calculate the metric for
-     * @param aggs the aggregations
-     * @return the metric result
+     * @param aggs aggregations from {@link SearchResponse}
      */
-    EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs);
+    void process(ClassInfo classInfo, Aggregations aggs);
 }
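The interface now follows a two-phase contract: aggs(...) declares the aggregations a metric needs, process(...) consumes each search response without returning anything, and the result is read separately once available. A caller-side sketch of that flow, assuming EvaluationMetric exposes Optional<EvaluationMetricResult> getResult() as the test changes below suggest (rSquared.getResult().get()); the driver class itself is hypothetical and illustrative only:

import java.util.Optional;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;

// Hypothetical driver, not part of this patch.
final class MetricDriverSketch {

    static Optional<EvaluationMetricResult> processOneResponse(SoftClassificationMetric metric,
                                                               SoftClassificationMetric.ClassInfo classInfo,
                                                               SearchResponse searchResponse) {
        // Phase 1: feed this round's aggregations into the metric; nothing is returned yet.
        metric.process(classInfo, searchResponse.getAggregations());
        // Phase 2: the result only appears once the metric has seen all the data it asked for,
        // which may take more than one process(...) round.
        return metric.getResult();
    }
}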
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java
index a22c499220c..2516b2fea94 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java
@@ -49,8 +49,9 @@ public class MeanSquaredErrorTests extends AbstractSerializingTestCase<MeanSquaredError> {
         ));
 
         RSquared rSquared = new RSquared();
-        EvaluationMetricResult result = rSquared.evaluate(aggs);
+        rSquared.process(aggs);
+        EvaluationMetricResult result = rSquared.getResult().get();
 
         String expected = "{\"value\":0.9348643947690524}";
         assertThat(Strings.toString(result), equalTo(expected));
     }
@@ -67,35 +68,48 @@ public class RSquaredTests extends AbstractSerializingTestCase<RSquared> {
         ));
 
         RSquared rSquared = new RSquared();
-        EvaluationMetricResult result = rSquared.evaluate(aggs);
+        rSquared.process(aggs);
+
+        EvaluationMetricResult result = rSquared.getResult().get();
         assertThat(result, equalTo(new RSquared.Result(0.0)));
     }
 
     public void testEvaluate_GivenMissingAggs() {
-        EvaluationMetricResult zeroResult = new RSquared.Result(0.0);
         Aggregations aggs = new Aggregations(Collections.singletonList(
             createSingleMetricAgg("some_other_single_metric_agg", 0.2377)
         ));
 
         RSquared rSquared = new RSquared();
-        EvaluationMetricResult result = rSquared.evaluate(aggs);
-        assertThat(result, equalTo(zeroResult));
+        rSquared.process(aggs);
 
-        aggs = new Aggregations(Arrays.asList(
+        EvaluationMetricResult result = rSquared.getResult().get();
+        assertThat(result, equalTo(new RSquared.Result(0.0)));
+    }
+
+    public void testEvaluate_GivenMissingExtendedStatsAgg() {
+        Aggregations aggs = new Aggregations(Arrays.asList(
             createSingleMetricAgg("some_other_single_metric_agg", 0.2377),
             createSingleMetricAgg("residual_sum_of_squares", 0.2377)
         ));
-        result = rSquared.evaluate(aggs);
-        assertThat(result, equalTo(zeroResult));
+        RSquared rSquared = new RSquared();
+        rSquared.process(aggs);
 
-        aggs = new Aggregations(Arrays.asList(
+        EvaluationMetricResult result = rSquared.getResult().get();
+        assertThat(result, equalTo(new RSquared.Result(0.0)));
+    }
+
+    public void testEvaluate_GivenMissingResidualSumOfSquaresAgg() {
+        Aggregations aggs = new Aggregations(Arrays.asList(
             createSingleMetricAgg("some_other_single_metric_agg", 0.2377),
             createExtendedStatsAgg("extended_stats_actual", 100, 50)
         ));
-        result = rSquared.evaluate(aggs);
-        assertThat(result, equalTo(zeroResult));
+        RSquared rSquared = new RSquared();
+        rSquared.process(aggs);
+
+        EvaluationMetricResult result = rSquared.getResult().get();
+        assertThat(result, equalTo(new RSquared.Result(0.0)));
     }
 
     private static NumericMetricsAggregation.SingleValue createSingleMetricAgg(String name, double value) {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java
index 7f089ab18cd..077998b66ae 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java
@@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
 
@@ -22,6 +23,7 @@ import java.util.Collections;
 import java.util.List;
 
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 
 public class RegressionTests extends AbstractSerializingTestCase<Regression> {
 
@@ -43,13 +45,7 @@ public class RegressionTests extends AbstractSerializingTestCase<Regression> {
         if (randomBoolean()) {
             metrics.add(RSquaredTests.createRandom());
         }
-        return new Regression(randomAlphaOfLength(10),
-            randomAlphaOfLength(10),
-            randomBoolean() ?
-                null :
-                metrics.isEmpty() ?
-                    null :
-                    metrics);
+        return new Regression(randomAlphaOfLength(10), randomAlphaOfLength(10), metrics.isEmpty() ? null : metrics);
     }
 
     @Override
@@ -74,7 +70,6 @@ public class RegressionTests extends AbstractSerializingTestCase<Regression> {
     }
 
     public void testBuildSearch() {
-        Regression evaluation = new Regression("act", "prob", Arrays.asList(new MeanSquaredError()));
         QueryBuilder userProvidedQuery = QueryBuilders.boolQuery()
             .filter(QueryBuilders.termQuery("field_A", "some-value"))
             .filter(QueryBuilders.termQuery("field_B", "some-other-value"));
         QueryBuilder expectedSearchQuery = QueryBuilders.boolQuery()
             .filter(QueryBuilders.existsQuery("act"))
-            .filter(QueryBuilders.existsQuery("prob"))
+            .filter(QueryBuilders.existsQuery("pred"))
             .filter(QueryBuilders.boolQuery()
                 .filter(QueryBuilders.termQuery("field_A", "some-value"))
                 .filter(QueryBuilders.termQuery("field_B", "some-other-value")));
-        assertThat(evaluation.buildSearch(userProvidedQuery).query(), equalTo(expectedSearchQuery));
+
+        Regression evaluation = new Regression("act", "pred", Arrays.asList(new MeanSquaredError()));
+
+        SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(userProvidedQuery);
+        assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery));
+        assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0));
     }
 }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java
index 6a589c0d055..e63e88f6f84 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java
@@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
 
@@ -22,6 +23,7 @@ import java.util.Collections;
 import java.util.List;
 
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 
 public class BinarySoftClassificationTests extends AbstractSerializingTestCase<BinarySoftClassification> {
 
@@ -81,7 +83,6 @@ public class BinarySoftClassificationTests extends AbstractSerializingTestCase
                                           listener) {
-        Evaluation evaluation = request.getEvaluation();
-        SearchRequest searchRequest = new SearchRequest(request.getIndices());
-        searchRequest.source(evaluation.buildSearch(request.getParsedQuery()));
-        ActionListener<List<EvaluationMetricResult>> resultsListener = ActionListener.wrap(
-            results -> listener.onResponse(new EvaluateDataFrameAction.Response(evaluation.getName(), results)),
+        ActionListener<List<Void>> resultsListener = ActionListener.wrap(
+            unused -> {
+                EvaluateDataFrameAction.Response response =
+                    new EvaluateDataFrameAction.Response(request.getEvaluation().getName(), request.getEvaluation().getResults());
+                listener.onResponse(response);
+            },
             listener::onFailure
         );
-        client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap(
-            searchResponse -> threadPool.generic().execute(() -> {
-                try {
-                    evaluation.evaluate(searchResponse, resultsListener);
-                } catch (Exception e) {
-                    listener.onFailure(e);
-                };
-            }),
-            listener::onFailure
-        ));
+        EvaluationExecutor evaluationExecutor = new EvaluationExecutor(threadPool, client, request);
+        evaluationExecutor.execute(resultsListener);
+    }
+
+    /**
+     * {@link EvaluationExecutor} class allows for serial execution of evaluation steps.
+     *
+     * Each step consists of the following phases:
+     *  1. build search request with aggs requested by individual metrics
+     *  2. execute search action with the request built in (1.)
+     *  3. make all individual metrics process the search response obtained in (2.)
+     *  4. check if all the metrics have their results computed
+     *      a) If so, call the final listener and finish
+     *      b) Otherwise, add another step to the queue
+     *
+     * To avoid an infinite loop it is essential that every metric *does* compute its result at some point.
+     */
+    private static final class EvaluationExecutor extends TypedChainTaskExecutor<Void> {
+
+        private final Client client;
+        private final EvaluateDataFrameAction.Request request;
+        private final Evaluation evaluation;
+
+        EvaluationExecutor(ThreadPool threadPool, Client client, EvaluateDataFrameAction.Request request) {
+            super(threadPool.generic(), unused -> true, unused -> true);
+            this.client = client;
+            this.request = request;
+            this.evaluation = request.getEvaluation();
+            // Add one task only. Other tasks will be added as needed by the nextTask method itself.
+            add(nextTask());
+        }
+
+        private TypedChainTaskExecutor.ChainTask<Void> nextTask() {
+            return listener -> {
+                SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(request.getParsedQuery());
+                SearchRequest searchRequest = new SearchRequest(request.getIndices()).source(searchSourceBuilder);
+                client.execute(
+                    SearchAction.INSTANCE,
+                    searchRequest,
+                    ActionListener.wrap(
+                        searchResponse -> {
+                            evaluation.process(searchResponse);
+                            if (evaluation.hasAllResults() == false) {
+                                add(nextTask());
+                            }
+                            listener.onResponse(null);
+                        },
+                        listener::onFailure));
+            };
+        }
+    }
 }
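The executor above relies on TypedChainTaskExecutor running queued tasks strictly one at a time, with a running task allowed to enqueue more work before completing. A minimal standalone sketch of that chaining idea, under the assumption that execute(...) drains the queue serially and reports everything collected at the end; this class is an illustration, not the actual TypedChainTaskExecutor implementation:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import java.util.function.Consumer;

// Illustrative serial chain; error handling and threading are elided.
final class SerialChainSketch<T> {

    interface Task<V> {
        void run(Consumer<V> onDone);
    }

    private final Deque<Task<T>> queue = new ArrayDeque<>();
    private final List<T> collected = new ArrayList<>();

    void add(Task<T> task) {
        queue.add(task);
    }

    void execute(Consumer<List<T>> onAllDone) {
        Task<T> next = queue.poll();
        if (next == null) {
            // Queue drained: every scheduled step has run, report what was collected.
            onAllDone.accept(collected);
            return;
        }
        // Run one step; while running it may call add(...) to schedule a follow-up step,
        // much like nextTask() re-queues itself until evaluation.hasAllResults() is true.
        next.run(value -> {
            collected.add(value);
            execute(onAllDone);
        });
    }
}

In the real code the final listener also routes failures, and the termination guarantee comes from the contract spelled out in the javadoc: every metric must eventually compute its result.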